1#!/usr/bin/env python3
2# Copyright (c) 2023 Google LLC
3#
4# SPDX-License-Identifier: Apache-2.0
5"""
6Tests for runner.py classes
7"""
8
9import errno
10import mock
11import os
12import pathlib
13import pytest
14import queue
15import re
16import subprocess
17import sys
18import yaml
19
20from contextlib import nullcontext
21from elftools.elf.sections import SymbolTableSection
22from typing import List
23
24ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
25sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))
26
27from twisterlib.statuses import TwisterStatus
28from twisterlib.error import BuildError
29from twisterlib.harness import Pytest
30
31from twisterlib.runner import (
32    CMake,
33    ExecutionCounter,
34    FilterBuilder,
35    ProjectBuilder,
36    TwisterRunner
37)
38
@pytest.fixture
def mocked_instance(tmp_path):
    """Return a mocked test instance backed by a real, empty build dir."""
    build_dir = tmp_path / 'build_dir'
    os.makedirs(build_dir)

    mocked_testsuite = mock.Mock()
    mocked_testsuite.source_dir = ''

    mocked_platform = mock.Mock()
    mocked_platform.sysbuild = False
    mocked_platform.binaries = []

    instance = mock.Mock()
    instance.testsuite = mocked_testsuite
    instance.platform = mocked_platform
    instance.build_dir = str(build_dir)
    return instance
53
54
@pytest.fixture
def mocked_env():
    """Return a mocked twister environment requesting verbose output."""
    return mock.Mock(options=mock.Mock(verbose=2))
62
63
@pytest.fixture
def mocked_jobserver():
    """Return a plain Mock standing in for a jobserver."""
    return mock.Mock()
68
69
@pytest.fixture
def project_builder(mocked_instance, mocked_env, mocked_jobserver) -> ProjectBuilder:
    """Return a ProjectBuilder wired to the mocked instance, env and jobserver."""
    return ProjectBuilder(mocked_instance, mocked_env, mocked_jobserver)
74
75
@pytest.fixture
def runners(project_builder: ProjectBuilder) -> dict:
    """
    Create runners.yaml file in build_dir/zephyr directory and return file
    content as dict.
    """
    zephyr_dir = os.path.join(project_builder.instance.build_dir, 'zephyr')
    os.makedirs(zephyr_dir)
    content: dict = {
        'config': {
            'elf_file': 'zephyr.elf',
            # deliberately absolute, so the sanitization tests have work to do
            'hex_file': os.path.join(zephyr_dir, 'zephyr.elf'),
            'bin_file': 'zephyr.bin',
        }
    }
    with open(os.path.join(zephyr_dir, 'runners.yaml'), 'w') as yaml_file:
        yaml.dump(content, yaml_file)

    return content
96
97
@mock.patch("os.path.exists")
def test_projectbuilder_cmake_assemble_args_single(m):
    """cmake_assemble_args() should merge base args, handler args, conf
    files, overlays and cmake defines into one -D... argument list."""
    # os.path.exists -> True causes the additional_overlay_path to be appended
    m.return_value = True

    class MockHandler:
        pass

    handler = MockHandler()
    handler.args = ["handler_arg1", "handler_arg2"]
    handler.ready = True

    expected = [
        "-DCONFIG_t=\"test\"",
        "-Dcmake1=foo", "-Dcmake2=bar",
        "-Dbasearg1", "-DSNIPPET_t=test",
        "-Dhandler_arg1", "-Dhandler_arg2",
        "-DCONF_FILE=a.conf;b.conf;c.conf",
        "-DDTC_OVERLAY_FILE=x.overlay;y.overlay;z.overlay",
        "-DOVERLAY_CONFIG=extra_overlay.conf "
        "/builddir/twister/testsuite_extra.conf",
    ]

    results = ProjectBuilder.cmake_assemble_args(
        ["basearg1", "CONFIG_t=\"test\"", "SNIPPET_t=\"test\""],
        handler,
        ["a.conf;b.conf", "c.conf"],
        ["extra_overlay.conf"],
        ["x.overlay;y.overlay", "z.overlay"],
        ["cmake1=foo", "cmake2=bar"],
        "/builddir/",
    )

    assert results == expected
128
129
def test_if_default_binaries_are_taken_properly(project_builder: ProjectBuilder):
    """With no platform binaries and no sysbuild, the four default Zephyr
    artifacts should be returned."""
    project_builder.instance.sysbuild = False

    expected = [
        os.path.join('zephyr', f'zephyr.{ext}')
        for ext in ('hex', 'bin', 'elf', 'exe')
    ]

    assert sorted(project_builder._get_binaries()) == sorted(expected)
140
141
def test_if_binaries_from_platform_are_taken_properly(project_builder: ProjectBuilder):
    """Binaries listed on the platform should replace the defaults and be
    returned relative to the 'zephyr' subdirectory."""
    platform_binaries = ['spi_image.bin']
    project_builder.platform.binaries = platform_binaries
    project_builder.instance.sysbuild = False
    # loop variable renamed from 'bin' to avoid shadowing the builtin
    platform_binaries_expected = [
        os.path.join('zephyr', binary) for binary in platform_binaries
    ]
    binaries = project_builder._get_binaries()
    assert sorted(binaries) == sorted(platform_binaries_expected)
149
150
def test_if_binaries_from_runners_are_taken_properly(runners, project_builder: ProjectBuilder):
    """Paths from runners.yaml should be returned; relative ones rooted in
    the 'zephyr' subdirectory, absolute ones kept as-is."""
    runners_binaries = list(runners['config'].values())
    # loop variable renamed from 'bin' to avoid shadowing the builtin
    runners_binaries_expected = [
        binary if os.path.isabs(binary) else os.path.join('zephyr', binary)
        for binary in runners_binaries
    ]
    binaries = project_builder._get_binaries_from_runners()
    assert sorted(binaries) == sorted(runners_binaries_expected)
156
157
def test_if_runners_file_is_sanitized_properly(runners, project_builder: ProjectBuilder):
    """_sanitize_runners_file() should rewrite runners.yaml so that its
    config section no longer contains any absolute path."""
    runners_file_path = os.path.join(project_builder.instance.build_dir, 'zephyr', 'runners.yaml')
    with open(runners_file_path, 'r') as file:
        unsanitized_runners_content = yaml.safe_load(file)
    unsanitized_runners_binaries = list(unsanitized_runners_content['config'].values())
    # comprehension variable renamed from 'bin' to avoid shadowing the builtin
    abs_paths = [path for path in unsanitized_runners_binaries if os.path.isabs(path)]
    # precondition: the fixture must have planted at least one absolute path
    assert len(abs_paths) > 0

    project_builder._sanitize_runners_file()

    with open(runners_file_path, 'r') as file:
        sanitized_runners_content = yaml.safe_load(file)
    sanitized_runners_binaries = list(sanitized_runners_content['config'].values())
    abs_paths = [path for path in sanitized_runners_binaries if os.path.isabs(path)]
    assert len(abs_paths) == 0
173
174
def test_if_zephyr_base_is_sanitized_properly(project_builder: ProjectBuilder):
    """_sanitize_zephyr_base_from_files() should strip the ZEPHYR_BASE
    prefix from paths stored in CMakeCache.txt."""
    expected = os.path.join('sanitized', 'path')
    cmakecache_file_path = os.path.join(project_builder.instance.build_dir, 'CMakeCache.txt')

    with open(cmakecache_file_path, 'w') as file:
        file.write(os.path.join(os.path.realpath(ZEPHYR_BASE), expected))

    project_builder._sanitize_zephyr_base_from_files()

    with open(cmakecache_file_path, 'r') as file:
        assert file.read() == expected
187
188
def test_executioncounter(capfd):
    """summary() should print the counter tree below and leave every
    counter value untouched."""
    counters = {
        'cases': 25,
        'skipped_cases': 6,
        'error': 2,
        'iteration': 2,
        'done': 9,
        'passed': 6,
        'filtered_configs': 3,
        'filtered_runtime': 1,
        'filtered_static': 2,
        'failed': 1,
    }

    ec = ExecutionCounter(total=12)
    for attr, value in counters.items():
        setattr(ec, attr, value)

    ec.summary()

    out, err = capfd.readouterr()
    sys.stdout.write(out)
    sys.stderr.write(err)

    expected_tree = (
        "├── Total test suites: 12\n"
        "├── Processed test suites: 9\n"
        "│   ├── Filtered test suites: 3\n"
        "│   │   ├── Filtered test suites (static): 2\n"
        "│   │   └── Filtered test suites (at runtime): 1\n"
        "│   └── Selected test suites: 6\n"
        "│       ├── Skipped test suites: 0\n"
        "│       ├── Passed test suites: 6\n"
        "│       ├── Built only test suites: 0\n"
        "│       ├── Failed test suites: 1\n"
        "│       └── Errors in test suites: 2\n"
        "└── Total test cases: 25\n"
        "    ├── Filtered test cases: 0\n"
        "    └── Selected test cases: 25\n"
        "        ├── Passed test cases: 0\n"
        "        ├── Skipped test cases: 6\n"
        "        ├── Built only test cases: 0\n"
        "        ├── Blocked test cases: 0\n"
        "        ├── Failed test cases: 0\n"
        "        └── Errors in test cases: 0\n"
    )
    assert expected_tree in out

    # summary() must not mutate any counter
    for attr, value in counters.items():
        assert getattr(ec, attr) == value
242
243
def test_cmake_parse_generated(mocked_jobserver):
    """With no build artifacts available, parse_generated() should leave
    defconfig empty and return an empty dict."""
    cmake = CMake(
        mock.Mock(),                       # testsuite
        mock.Mock(),                       # platform
        os.path.join('source', 'dir'),
        os.path.join('build', 'dir'),
        mocked_jobserver
    )

    result = cmake.parse_generated()

    assert cmake.defconfig == {}
    assert result == {}
257
258
# Values of sys.platform under which test_cmake_run_build runs.
# NOTE: the previous ('linux') / ('nt') spellings were no-op parentheses
# around plain strings (not tuples) and read as a bug; use a flat list.
TESTDATA_1_1 = [
    'linux',
    'nt'
]
# Columns: return_code, is_instance_run, p_out, expect_returncode,
# expect_writes, expected_status, expected_reason, expected_change_skip,
# expected_add_missing.
TESTDATA_1_2 = [
    (0, False, 'dummy out',
     True, True, TwisterStatus.NOTRUN, None, False, True),
    (0, True, '',
     False, False, TwisterStatus.PASS, None, False, False),
    (1, True, 'ERROR: region `FLASH\' overflowed by 123 MB',
     True,  True, TwisterStatus.SKIP, 'FLASH overflow', True, False),
    (1, True, 'Error: Image size (99 B) + trailer (1 B) exceeds requested size',
     True, True, TwisterStatus.SKIP, 'imgtool overflow', True, False),
    (1, True, 'mock.ANY',
     True, True, TwisterStatus.ERROR, 'Build failure', False, False)
]
275
@pytest.mark.parametrize(
    'return_code, is_instance_run, p_out, expect_returncode,' \
    ' expect_writes, expected_status, expected_reason,' \
    ' expected_change_skip, expected_add_missing',
    TESTDATA_1_2,
    ids=['no error, no instance run', 'no error, instance run',
         'error - region overflow', 'error - image size exceed', 'error']
)
@pytest.mark.parametrize('sys_platform', TESTDATA_1_1)
def test_cmake_run_build(
    sys_platform,
    return_code,
    is_instance_run,
    p_out,
    expect_returncode,
    expected_status,
    expect_writes,
    expected_reason,
    expected_change_skip,
    expected_add_missing
):
    """Check that CMake.run_build() maps the mocked build process' return
    code and output onto the right result dict, instance status and reason.

    On 'linux' the build must be spawned through the jobserver's popen;
    on other platforms through subprocess.Popen directly.
    """
    # Fake build process with a fixed return code and canned output.
    process_mock = mock.Mock(
        returncode=return_code,
        communicate=mock.Mock(
            return_value=(p_out.encode(sys.getdefaultencoding()), None)
        )
    )

    def mock_popen(*args, **kwargs):
        return process_mock

    testsuite_mock = mock.Mock()
    platform_mock = mock.Mock()
    platform_mock.name = '<platform name>'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')
    jobserver_mock = mock.Mock(
        popen=mock.Mock(side_effect=mock_popen)
    )
    instance_mock = mock.Mock(add_missing_case_status=mock.Mock())
    instance_mock.build_time = 0
    instance_mock.run = is_instance_run
    instance_mock.status = TwisterStatus.NONE
    instance_mock.reason = None

    cmake = CMake(testsuite_mock, platform_mock, source_dir, build_dir,
                  jobserver_mock)
    cmake.cwd = os.path.join('dummy', 'working', 'dir')
    cmake.instance = instance_mock
    cmake.options = mock.Mock()
    cmake.options.overflow_as_errors = False

    cmake_path = os.path.join('dummy', 'cmake')

    popen_mock = mock.Mock(side_effect=mock_popen)
    change_mock = mock.Mock()

    # Patch platform detection, cmake lookup, the skip->error helper, log
    # writing and process spawning so no real build is attempted.
    with mock.patch('sys.platform', sys_platform), \
         mock.patch('shutil.which', return_value=cmake_path), \
         mock.patch('twisterlib.runner.change_skip_to_error_if_integration',
                    change_mock), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('subprocess.Popen', popen_mock):
        result = cmake.run_build(args=['arg1', 'arg2'])

    # run_build() returns a dict with the returncode, or None when there is
    # nothing to report.
    expected_results = {}
    if expect_returncode:
        expected_results['returncode'] = return_code
    if expected_results == {}:
        expected_results = None

    assert expected_results == result

    # Exactly one of the two spawn paths may have been used, depending on
    # the mocked sys.platform.
    popen_caller = cmake.jobserver.popen if sys_platform == 'linux' else \
                   popen_mock
    popen_caller.assert_called_once_with(
        [os.path.join('dummy', 'cmake'), 'arg1', 'arg2'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=os.path.join('dummy', 'working', 'dir')
    )

    assert cmake.instance.status == expected_status
    assert cmake.instance.reason == expected_reason

    if expected_change_skip:
        change_mock.assert_called_once()

    if expected_add_missing:
        cmake.instance.add_missing_case_status.assert_called_once_with(
            TwisterStatus.NOTRUN, 'Test was built only'
        )
368
369
# Values of sys.platform under which test_cmake_run_cmake runs.
# NOTE: the previous ('linux') / ('nt') spellings were no-op parentheses
# around plain strings (not tuples) and read as a bug; use a flat list.
TESTDATA_2_1 = [
    'linux',
    'nt'
]
# Columns: error_warns, f_stages, return_code, is_instance_run, p_out,
# expect_returncode, expect_filter, expect_writes, expected_status,
# expected_reason, expected_cmd.
TESTDATA_2_2 = [
    (True, ['dummy_stage_1', 'ds2'],
     0, False, '',
     True, True, False,
     TwisterStatus.NONE, None,
     [os.path.join('dummy', 'cmake'),
      '-B' + os.path.join('build', 'dir'), '-DTC_RUNID=1', '-DTC_NAME=testcase',
      '-DSB_CONFIG_COMPILER_WARNINGS_AS_ERRORS=y',
      '-DEXTRA_GEN_EDT_ARGS=--edtlib-Werror', '-Gdummy_generator',
      f'-DPython3_EXECUTABLE={pathlib.Path(sys.executable).as_posix()}',
      '-S' + os.path.join('source', 'dir'),
      'arg1', 'arg2',
      '-DBOARD=<platform name>',
      '-DSNIPPET=dummy snippet 1;ds2',
      '-DMODULES=dummy_stage_1,ds2',
      '-Pzephyr_base/cmake/package_helper.cmake']),
    (False, [],
     1, True, 'ERROR: region `FLASH\' overflowed by 123 MB',
     True, False, True,
     TwisterStatus.ERROR, 'CMake build failure',
     [os.path.join('dummy', 'cmake'),
      '-B' + os.path.join('build', 'dir'), '-DTC_RUNID=1', '-DTC_NAME=testcase',
      '-DSB_CONFIG_COMPILER_WARNINGS_AS_ERRORS=n',
      '-DEXTRA_GEN_EDT_ARGS=', '-Gdummy_generator',
      f'-DPython3_EXECUTABLE={pathlib.Path(sys.executable).as_posix()}',
      '-Szephyr_base/share/sysbuild',
      '-DAPP_DIR=' + os.path.join('source', 'dir'),
      'arg1', 'arg2',
      '-DBOARD=<platform name>',
      '-DSNIPPET=dummy snippet 1;ds2']),
]
405
@pytest.mark.parametrize(
    'error_warns, f_stages,' \
    ' return_code, is_instance_run, p_out, expect_returncode,' \
    ' expect_filter, expect_writes, expected_status, expected_reason,' \
    ' expected_cmd',
    TESTDATA_2_2,
    ids=['filter_stages with success', 'no stages with error']
)
@pytest.mark.parametrize('sys_platform', TESTDATA_2_1)
def test_cmake_run_cmake(
    sys_platform,
    error_warns,
    f_stages,
    return_code,
    is_instance_run,
    p_out,
    expect_returncode,
    expect_filter,
    expect_writes,
    expected_status,
    expected_reason,
    expected_cmd
):
    """Check that CMake.run_cmake() assembles the expected cmake command
    line (full sysbuild configure vs. filter-stage invocation) and maps the
    mocked process result onto instance status, reason and testcases.
    """
    # Fake cmake process with a fixed return code and canned output.
    process_mock = mock.Mock(
        returncode=return_code,
        communicate=mock.Mock(
            return_value=(p_out.encode(sys.getdefaultencoding()), None)
        )
    )

    def mock_popen(*args, **kwargs):
        return process_mock

    testsuite_mock = mock.Mock()
    testsuite_mock.sysbuild = True
    platform_mock = mock.Mock()
    platform_mock.name = '<platform name>'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')
    jobserver_mock = mock.Mock(
        popen=mock.Mock(side_effect=mock_popen)
    )
    instance_mock = mock.Mock(add_missing_case_status=mock.Mock())
    instance_mock.run = is_instance_run
    instance_mock.run_id = 1
    instance_mock.build_time = 0
    instance_mock.status = TwisterStatus.NONE
    instance_mock.reason = None
    instance_mock.testsuite = mock.Mock()
    instance_mock.testsuite.name = 'testcase'
    instance_mock.testsuite.required_snippets = ['dummy snippet 1', 'ds2']
    instance_mock.testcases = [mock.Mock(), mock.Mock()]
    instance_mock.testcases[0].status = TwisterStatus.NONE
    instance_mock.testcases[1].status = TwisterStatus.NONE

    cmake = CMake(testsuite_mock, platform_mock, source_dir, build_dir,
                  jobserver_mock)
    cmake.cwd = os.path.join('dummy', 'working', 'dir')
    cmake.instance = instance_mock
    cmake.options = mock.Mock()
    cmake.options.disable_warnings_as_errors = not error_warns
    cmake.options.overflow_as_errors = False
    cmake.env = mock.Mock()
    cmake.env.generator = 'dummy_generator'

    cmake_path = os.path.join('dummy', 'cmake')

    popen_mock = mock.Mock(side_effect=mock_popen)
    change_mock = mock.Mock()

    # Patch platform detection, cmake lookup, the skip->error helper, the
    # canonical Zephyr base (so expected_cmd paths are stable), log writing
    # and process spawning.
    with mock.patch('sys.platform', sys_platform), \
         mock.patch('shutil.which', return_value=cmake_path), \
         mock.patch('twisterlib.runner.change_skip_to_error_if_integration',
                    change_mock), \
         mock.patch('twisterlib.runner.canonical_zephyr_base',
                    'zephyr_base'), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('subprocess.Popen', popen_mock):
        result = cmake.run_cmake(args=['arg1', 'arg2'], filter_stages=f_stages)

    # run_cmake() returns a dict with returncode and/or filter, or None.
    expected_results = {}
    if expect_returncode:
        expected_results['returncode'] = return_code
    if expect_filter:
        expected_results['filter'] = {}
    if expected_results == {}:
        expected_results = None

    assert expected_results == result

    # Exactly one of the two spawn paths may have been used, depending on
    # the mocked sys.platform.
    popen_caller = cmake.jobserver.popen if sys_platform == 'linux' else \
                   popen_mock
    popen_caller.assert_called_once_with(
        expected_cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=os.path.join('dummy', 'working', 'dir')
    )

    assert cmake.instance.status == expected_status
    assert cmake.instance.reason == expected_reason

    # Every testcase must inherit the instance status.
    for tc in cmake.instance.testcases:
        assert tc.status == cmake.instance.status
510
511
# Cases for test_filterbuilder_parse_generated.
# Columns: platform_name, filter_stages, sysbuild,
#          do_find_cache, west_flash_options, edt_exists,
#          parse_results, testsuite_filter,
#          expected_defconfig_path, expected_edt_pickle_path,
#          expected_defconfig, expected_cmakecache, expected_filter_data,
#          expected_edt, expected_logs, expected_return.
# Case labels below follow the parametrize ids list.
TESTDATA_3 = [
    # 'unit testing'
    ('unit_testing', [], False, True, None, True, None, True,
     None, None, {}, {}, None, None, [], {}),
    # 'domain'
    (
        'other', [], True,
        True, ['dummy', 'west', 'options'], True,
        None, True,
        os.path.join('domain', 'build', 'dir', 'zephyr', '.config'),
        os.path.join('domain', 'build', 'dir', 'zephyr', 'edt.pickle'),
        {'CONFIG_FOO': 'no'},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'CONFIG_FOO': 'no', 'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [f'Loaded sysbuild domain data from' \
         f' {os.path.join("build", "dir", "domains.yaml")}'],
        {os.path.join('other', 'dummy.testsuite.name'): True}
    ),
    # 'kconfig'
    (
        'other', ['kconfig'], True,
        True, ['dummy', 'west', 'options'], True,
        'Dummy parse results', True,
        os.path.join('build', 'dir', 'zephyr', '.config'),
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {'CONFIG_FOO': 'no'},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'CONFIG_FOO': 'no', 'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'dummy.testsuite.name'): False}
    ),
    # 'no cache'
    (
        'other', ['other'], False,
        False, None, True,
        'Dummy parse results', True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'dummy.testsuite.name'): False}
    ),
    # 'no west options'
    (
        'other', ['other'], True,
        False, None, True,
        'Dummy parse results', True,
        None,
        None,
        {},
        {},
        {},
        None,
        ['Sysbuild test will be skipped. West must be used for flashing.'],
        {os.path.join('other', 'dummy.testsuite.name'): True}
    ),
    # 'no edt'
    (
        'other', ['other'], False,
        True, None, False,
        'Dummy parse results', True,
        None,
        None,
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        None,
        [],
        {os.path.join('other', 'dummy.testsuite.name'): False}
    ),
    # 'parse result'
    (
        'other', ['other'], False,
        True, None, True,
        'Dummy parse results', True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'dummy.testsuite.name'): False}
    ),
    # 'no parse result'
    (
        'other', ['other'], False,
        True, None, True,
        None, True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'dummy.testsuite.name'): True}
    ),
    # 'no testsuite filter'
    (
        'other', ['other'], False,
        True, None, True,
        'Dummy parse results', False,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1}
    ),
    # 'parse err' - parse raises, so expected_return is the exception type
    (
        'other', ['other'], False,
        True, None, True,
        SyntaxError, True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        ['Failed processing testsuite.yaml'],
        SyntaxError
    ),
]
642
@pytest.mark.parametrize(
    'platform_name, filter_stages, sysbuild,' \
    ' do_find_cache, west_flash_options, edt_exists,' \
    ' parse_results, testsuite_filter,' \
    ' expected_defconfig_path, expected_edt_pickle_path,' \
    ' expected_defconfig, expected_cmakecache, expected_filter_data,' \
    ' expected_edt,' \
    ' expected_logs, expected_return',
    TESTDATA_3,
    ids=['unit testing', 'domain', 'kconfig', 'no cache',
         'no west options', 'no edt',
         'parse result', 'no parse result', 'no testsuite filter', 'parse err']
)
def test_filterbuilder_parse_generated(
    caplog,
    mocked_jobserver,
    platform_name,
    filter_stages,
    sysbuild,
    do_find_cache,
    west_flash_options,
    edt_exists,
    parse_results,
    testsuite_filter,
    expected_defconfig_path,
    expected_edt_pickle_path,
    expected_defconfig,
    expected_cmakecache,
    expected_filter_data,
    expected_edt,
    expected_logs,
    expected_return
):
    """Check that FilterBuilder.parse_generated() gathers defconfig,
    CMake cache and devicetree data from the mocked build tree, feeds the
    expected filter data into the expression parser, and returns (or
    raises) accordingly.
    """
    # Sysbuild default domain resolving to a fixed build dir.
    def mock_domains_from_file(*args, **kwargs):
        dom = mock.Mock()
        dom.build_dir = os.path.join('domain', 'build', 'dir')
        res = mock.Mock(get_default_domain=mock.Mock(return_value=dom))
        return res

    # CMake cache lookup; missing cache is simulated via FileNotFoundError.
    def mock_cmakecache_from_file(*args, **kwargs):
        if not do_find_cache:
            raise FileNotFoundError(errno.ENOENT, 'Cache not found')
        cache_elem = mock.Mock()
        cache_elem.name = 'dummy cache elem'
        cache_elem.value = 1
        cache = [cache_elem]
        return cache

    # Only the expected defconfig and edt.pickle paths are readable;
    # anything else raises, mirroring a sparse build tree.
    def mock_open(filepath, *args, **kwargs):
        if filepath == expected_defconfig_path:
            rd = 'I am not a proper line\n' \
                 'CONFIG_FOO="no"'
        elif filepath == expected_edt_pickle_path:
            rd = b'dummy edt pickle contents'
        else:
            raise FileNotFoundError(errno.ENOENT,
                                    f'File {filepath} not mocked.')
        return mock.mock_open(read_data=rd)()

    # Stand-in for expr_parser.parse; verifies the assembled filter data
    # and either returns the canned result or raises the requested error.
    def mock_parser(filter, filter_data, edt):
        assert filter_data == expected_filter_data
        if isinstance(parse_results, type) and \
           issubclass(parse_results, Exception):
            raise parse_results
        return parse_results

    # Stand-in for pickle.load; verifies the edt pickle bytes were read.
    def mock_pickle(datafile):
        assert datafile.read() == expected_edt
        return mock.Mock()

    testsuite_mock = mock.Mock()
    testsuite_mock.name = 'dummy.testsuite.name'
    testsuite_mock.filter = testsuite_filter
    platform_mock = mock.Mock()
    platform_mock.name = platform_name
    platform_mock.arch = 'dummy arch'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')

    fb = FilterBuilder(testsuite_mock, platform_mock, source_dir, build_dir,
                       mocked_jobserver)
    instance_mock = mock.Mock()
    instance_mock.sysbuild = 'sysbuild' if sysbuild else None
    fb.instance = instance_mock
    fb.env = mock.Mock()
    fb.env.options = mock.Mock()
    fb.env.options.west_flash = west_flash_options
    fb.env.options.device_testing = True

    environ_mock = {'env_dummy': True}

    # pytest.raises is only armed when parse_results is an exception type;
    # otherwise a nullcontext leaves err as None.
    with mock.patch('twisterlib.runner.Domains.from_file',
                    mock_domains_from_file), \
         mock.patch('twisterlib.runner.CMakeCache.from_file',
                    mock_cmakecache_from_file), \
         mock.patch('builtins.open', mock_open), \
         mock.patch('expr_parser.parse', mock_parser), \
         mock.patch('pickle.load', mock_pickle), \
         mock.patch('os.path.exists', return_value=edt_exists), \
         mock.patch('os.environ', environ_mock), \
         pytest.raises(expected_return) if \
             isinstance(parse_results, type) and \
             issubclass(parse_results, Exception) else nullcontext() as err:
        result = fb.parse_generated(filter_stages)

    # When the parser raised, pytest.raises already validated the exception
    # type; there is nothing more to check.
    if err:
        assert True
        return

    assert all([log in caplog.text for log in expected_logs])

    assert fb.defconfig == expected_defconfig

    assert fb.cmake_cache == expected_cmakecache

    assert result == expected_return
759
760
# Columns for test_projectbuilder_log_info:
# (inline_logs, read_exception, expected_logs)
_DUMMY_LOG_PATH = os.path.join('dummy', 'path', 'dummy_file.log')
TESTDATA_4 = [
    (False, False, [f"see: {_DUMMY_LOG_PATH}"]),
    (True, False, [_DUMMY_LOG_PATH,
                   'file contents',
                   _DUMMY_LOG_PATH]),
    (True, True, [_DUMMY_LOG_PATH,
                  'Unable to read log data ([Errno 2] ERROR: dummy_file.log)',
                  _DUMMY_LOG_PATH]),
]
770
@pytest.mark.parametrize(
    'inline_logs, read_exception, expected_logs',
    TESTDATA_4,
    ids=['basic', 'inline logs', 'inline logs+read_exception']
)
def test_projectbuilder_log_info(
    caplog,
    mocked_jobserver,
    inline_logs,
    read_exception,
    expected_logs
):
    """log_info() should log the resolved log path, or (when inline logs
    are requested) the file contents / the read failure message."""
    filename = 'dummy_file.log'

    def fake_open(name, *args, **kwargs):
        # Simulate an unreadable log file when requested.
        if read_exception:
            raise OSError(errno.ENOENT, f'ERROR: {os.path.basename(name)}')
        return mock.mock_open(read_data='file contents')()

    pb = ProjectBuilder(mock.Mock(), mock.Mock(), mocked_jobserver)

    with mock.patch('builtins.open', fake_open), \
         mock.patch('os.path.realpath',
                    lambda name, *a, **kw: os.path.join('path', name)), \
         mock.patch('os.path.abspath',
                    lambda name, *a, **kw: os.path.join('dummy', name)):
        pb.log_info(filename, inline_logs)

    for expected_log in expected_logs:
        assert expected_log in caplog.text
806
807
# Columns for test_projectbuilder_log_info_file:
# (valgrind_log_exists, handler_log_exists, device_log_exists,
#  instance_reason, handler_log_getsize, device_log_getsize, expected_log)
_BUILD_LOG = 'build_dir/build.log'
TESTDATA_5 = [
    (True, False, False, "Valgrind error", 0, 0, 'build_dir/valgrind.log'),
    (True, False, False, "Error", 0, 0, _BUILD_LOG),
    (False, True, False, None, 1024, 0, 'build_dir/handler.log'),
    (False, True, False, None, 0, 0, _BUILD_LOG),
    (False, False, True, None, 0, 1024, 'build_dir/device.log'),
    (False, False, True, None, 0, 0, _BUILD_LOG),
    (False, False, False, None, 0, 0, _BUILD_LOG),
]
817
@pytest.mark.parametrize(
    'valgrind_log_exists, handler_log_exists, device_log_exists,' \
    ' instance_reason, handler_log_getsize, device_log_getsize, expected_log',
    TESTDATA_5,
    ids=['valgrind log', 'valgrind log unused',
         'handler log', 'handler log unused',
         'device log', 'device log unused',
         'no logs']
)
def test_projectbuilder_log_info_file(
    caplog,
    mocked_jobserver,
    valgrind_log_exists,
    handler_log_exists,
    device_log_exists,
    instance_reason,
    handler_log_getsize,
    device_log_getsize,
    expected_log
):
    """log_info_file() should pick the most relevant existing, non-empty
    log file and hand it to log_info()."""
    exists_map = {
        'build_dir/handler.log': handler_log_exists,
        'build_dir/valgrind.log': valgrind_log_exists,
        'build_dir/device.log': device_log_exists,
    }
    size_map = {
        'build_dir/handler.log': handler_log_getsize,
        'build_dir/device.log': device_log_getsize,
    }

    instance_mock = mock.Mock()
    instance_mock.reason = instance_reason
    instance_mock.build_dir = 'build_dir'

    pb = ProjectBuilder(instance_mock, mock.Mock(), mocked_jobserver)

    log_info_mock = mock.Mock()

    with mock.patch('os.path.exists',
                    lambda name, *a, **kw: exists_map.get(name, False)), \
         mock.patch('os.path.getsize',
                    lambda name, *a, **kw: size_map.get(name, 0)), \
         mock.patch('twisterlib.runner.ProjectBuilder.log_info', log_info_mock):
        pb.log_info_file(None)

    log_info_mock.assert_called_with(expected_log, mock.ANY)
869
870
# Parameters for test_projectbuilder_process. Each 20-tuple maps positionally
# onto the parametrize argument list below:
#  1. message                          - pipeline message whose 'op' drives process()
#  2. instance_status                  - initial instance status
#  3. instance_reason                  - initial instance reason
#  4. instance_run                     - instance 'run' flag
#  5. instance_handler_ready           - handler readiness flag
#  6. options_cmake_only               - cmake_only option
#  7. options_coverage                 - coverage option
#  8. options_prep_artifacts           - prep_artifacts_for_testing option
#  9. options_runtime_artifacts        - runtime_artifact_cleanup option
# 10. cmake_res                        - mocked ProjectBuilder.cmake() result
# 11. build_res                        - mocked ProjectBuilder.build() result
# 12. metrics_res                      - mocked gather_metrics() result
# 13. pipeline_runtime_error           - exception type for pipeline.put, or mock.ANY/None
# 14. determine_testcases_build_error  - exception type for determine_testcases, or mock.ANY
# 15. expected_logs                    - log fragments expected in caplog
# 16. resulting_message                - follow-up message expected on the pipeline, or None
# 17. expected_status                  - final instance status
# 18. expected_reason                  - final instance reason
# 19. expected_skipped                 - expected filtered_runtime_increment call count
# 20. expected_missing                 - expected add_missing_case_status args, or None
TESTDATA_6 = [
    # case 1: 'filter, failed'
    (
        {'op': 'filter'},
        TwisterStatus.FAIL,
        'Failed',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.FAIL,
        'Failed',
        0,
        None
    ),
    # case 2: 'filter, cmake res'
    (
        {'op': 'filter'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': { 'dummy instance name': True }},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['filtering dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.FILTER,
        'runtime filter',
        1,
        (TwisterStatus.FILTER,)
    ),
    # case 3: 'filter, no cmake res'
    (
        {'op': 'filter'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': { 'another dummy instance name': True }},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cmake', 'test': mock.ANY},
        TwisterStatus.PASS,
        mock.ANY,
        0,
        None
    ),
    # case 4: 'cmake, failed'
    (
        {'op': 'cmake'},
        TwisterStatus.ERROR,
        'dummy error',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.ERROR,
        'dummy error',
        0,
        None
    ),
    # case 5: 'cmake, cmake_only, no status'
    (
        {'op': 'cmake'},
        TwisterStatus.NONE,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.NOTRUN,
        mock.ANY,
        0,
        None
    ),
    # case 6: 'cmake, cmake_only'
    (
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        'success',
        mock.ANY,
        0,
        None
    ),
    # case 7: 'cmake, no cmake_only, cmake res'
    (
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': {'dummy instance name': True}},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['filtering dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.FILTER,
        'runtime filter',
        1,
        (TwisterStatus.FILTER,) # this is a tuple
    ),
    # case 8: 'cmake, no cmake_only, no cmake res'
    (
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': {}},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'build', 'test': mock.ANY},
        'success',
        mock.ANY,
        0,
        None
    ),
    # case 9: 'build, no build res'
    (
        {'op': 'build'},
        mock.ANY,
        None,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        None,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.ERROR,
        'Build Failure',
        0,
        None
    ),
    # case 10: 'build, skipped'
    (
        {'op': 'build'},
        TwisterStatus.SKIP,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'gather_metrics', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        (TwisterStatus.SKIP, mock.ANY)
    ),
    # case 11: 'build, blocked'
    (
        {'op': 'build'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'dummy': 'dummy'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.PASS,
        mock.ANY,
        0,
        (TwisterStatus.BLOCK, mock.ANY)
    ),
    # case 12: 'build, determine testcases'
    (
        {'op': 'build'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'gather_metrics', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # case 13: 'build, determine testcases Error'
    (
        {'op': 'build'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        mock.ANY,
        BuildError,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.ERROR,
        'Determine Testcases Error!',
        0,
        None
    ),
    # case 14: 'gather metrics, run and ready handler'
    (
        {'op': 'gather_metrics'},
        mock.ANY,
        mock.ANY,
        True,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},  # metrics_res
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'run', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),  # 'gather metrics, run and ready handler'
    # case 15: 'gather metrics'
    (
        {'op': 'gather_metrics'},
        mock.ANY,
        mock.ANY,
        False,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},  # metrics_res
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),  # 'gather metrics'
    # case 16: 'build ok, gather metrics fail'
    (
        {'op': 'gather_metrics'},
        mock.ANY,
        mock.ANY,
        False,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},  # build_res
        {'returncode': 1},  # metrics_res
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        'error',
        'Build Failure at gather_metrics.',
        0,
        None
    ),  # 'build ok, gather metrics fail',
    # case 17: 'run'
    (
        {'op': 'run'},
        'success',
        'OK',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        None,
        mock.ANY,
        ['run test: dummy instance name',
         'run status: dummy instance name success'],
        {'op': 'report', 'test': mock.ANY, 'status': 'success', 'reason': 'OK'},
        'success',
        'OK',
        0,
        None
    ),
    # case 18: 'run, Pipeline Runtime Error'
    (
        {'op': 'run'},
        TwisterStatus.FAIL,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        RuntimeError,
        mock.ANY,
        ['run test: dummy instance name',
         'run status: dummy instance name failed',
         'RuntimeError: Pipeline Error!'],
        None,
        TwisterStatus.FAIL,
        mock.ANY,
        0,
        None
    ),
    # case 19: 'report, prep artifacts for testing'
    (
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'device', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # case 20: 'report, runtime artifact cleanup pass, status passed'
    (
        {'op': 'report'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'pass',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'passed', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # case 21: 'report, runtime artifact cleanup all'
    (
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'all',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'all', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # case 22: 'report, no message put'
    (
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'other',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # case 23: 'cleanup, device'
    (
        {'op': 'cleanup', 'mode': 'device'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # case 24: 'cleanup, mode passed'
    (
        {'op': 'cleanup', 'mode': 'passed'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # case 25: 'cleanup, mode all'
    (
        {'op': 'cleanup', 'mode': 'all'},
        mock.ANY,
        'Valgrind error',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # case 26: 'cleanup, mode all, cmake build failure'
    (
        {'op': 'cleanup', 'mode': 'all'},
        mock.ANY,
        'CMake build failure',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
]
1451
@pytest.mark.parametrize(
    'message,' \
    ' instance_status, instance_reason, instance_run, instance_handler_ready,' \
    ' options_cmake_only,' \
    ' options_coverage, options_prep_artifacts, options_runtime_artifacts,' \
    ' cmake_res, build_res, metrics_res,' \
    ' pipeline_runtime_error, determine_testcases_build_error,' \
    ' expected_logs, resulting_message,' \
    ' expected_status, expected_reason, expected_skipped, expected_missing',
    TESTDATA_6,
    ids=[
        'filter, failed', 'filter, cmake res', 'filter, no cmake res',
        'cmake, failed', 'cmake, cmake_only, no status', 'cmake, cmake_only',
        'cmake, no cmake_only, cmake res', 'cmake, no cmake_only, no cmake res',
        'build, no build res', 'build, skipped', 'build, blocked',
        'build, determine testcases', 'build, determine testcases Error',
        'gather metrics, run and ready handler', 'gather metrics',
        'build ok, gather metrics fail',
        'run', 'run, Pipeline Runtime Error',
        'report, prep artifacts for testing',
        'report, runtime artifact cleanup pass, status passed',
        'report, runtime artifact cleanup all', 'report, no message put',
        'cleanup, device', 'cleanup, mode passed', 'cleanup, mode all',
        'cleanup, mode all, cmake build failure'
    ]
)
def test_projectbuilder_process(
    caplog,
    mocked_jobserver,
    message,
    instance_status,
    instance_reason,
    instance_run,
    instance_handler_ready,
    options_cmake_only,
    options_coverage,
    options_prep_artifacts,
    options_runtime_artifacts,
    cmake_res,
    build_res,
    metrics_res,
    pipeline_runtime_error,
    determine_testcases_build_error,
    expected_logs,
    resulting_message,
    expected_status,
    expected_reason,
    expected_skipped,
    expected_missing
):
    """
    Drive one ProjectBuilder.process() step for the 'op' carried in
    ``message`` with all collaborators mocked, then verify the emitted
    logs, the follow-up message put on the pipeline, and the instance's
    final status/reason.  See TESTDATA_6 for the per-case wiring.
    """
    # Simulate pipeline.put failing: raise only when the parametrized
    # value is an actual Exception subclass (mock.ANY/None fall through).
    def mock_pipeline_put(msg):
        if isinstance(pipeline_runtime_error, type) and \
           issubclass(pipeline_runtime_error, Exception):
            raise RuntimeError('Pipeline Error!')

    # Same pattern for determine_testcases: raise BuildError on demand.
    def mock_determine_testcases(res):
        if isinstance(determine_testcases_build_error, type) and \
           issubclass(determine_testcases_build_error, Exception):
            raise BuildError('Determine Testcases Error!')

    instance_mock = mock.Mock()
    instance_mock.name = 'dummy instance name'
    instance_mock.status = instance_status
    instance_mock.reason = instance_reason
    instance_mock.run = instance_run
    instance_mock.handler = mock.Mock()
    instance_mock.handler.ready = instance_handler_ready
    instance_mock.testsuite.harness = 'test'
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options = mock.Mock()
    pb.options.coverage = options_coverage
    pb.options.prep_artifacts_for_testing = options_prep_artifacts
    pb.options.runtime_artifact_cleanup = options_runtime_artifacts
    pb.options.cmake_only = options_cmake_only

    # Stub out every stage process() may dispatch to.
    pb.cmake = mock.Mock(return_value=cmake_res)
    pb.build = mock.Mock(return_value=build_res)
    pb.determine_testcases = mock.Mock(side_effect=mock_determine_testcases)

    pb.report_out = mock.Mock()
    pb.cleanup_artifacts = mock.Mock()
    pb.cleanup_device_testing_artifacts = mock.Mock()
    pb.run = mock.Mock()
    pb.gather_metrics = mock.Mock(return_value=metrics_res)

    pipeline_mock = mock.Mock(put=mock.Mock(side_effect=mock_pipeline_put))
    done_mock = mock.Mock()
    # Hand-stubbed context-manager protocol: __enter__ yields a pair of mocks.
    lock_mock = mock.Mock(
        __enter__=mock.Mock(return_value=(mock.Mock(), mock.Mock())),
        __exit__=mock.Mock(return_value=None)
    )
    results_mock = mock.Mock()
    results_mock.filtered_runtime = 0

    pb.process(pipeline_mock, done_mock, message, lock_mock, results_mock)

    assert all([log in caplog.text for log in expected_logs])

    # Some cases (e.g. a pipeline runtime error) put no follow-up message.
    if resulting_message:
        pipeline_mock.put.assert_called_with(resulting_message)

    assert pb.instance.status == expected_status
    assert pb.instance.reason == expected_reason
    # Runtime-filtered cases bump the filtered_runtime counter exactly once.
    assert results_mock.filtered_runtime_increment.call_args_list == [mock.call()] * expected_skipped

    if expected_missing:
        pb.instance.add_missing_case_status.assert_called_with(*expected_missing)
1561
1562
# Parameters for test_projectbuilder_determine_testcases:
# (detailed_id, symbols_names, added_tcs)
#  - detailed_id:   testsuite.detailed_test_id flag (full vs short case names)
#  - symbols_names: ELF symbol-table names fed to determine_testcases()
#  - added_tcs:     test case names expected to be registered
TESTDATA_7 = [
    # case 1: 'two hits, one miss' - two ztest symbols match, one does not
    (
        True,
        [
            'z_ztest_unit_test__dummy_suite1_name__dummy_test_name1',
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'no match'
        ],
        [
            'dummy.test_id.dummy_suite1_name.dummy_name1',
            'dummy.test_id.dummy_suite2_name.dummy_name2'
        ]
    ),
    # case 2: 'two hits short id' - same symbols, detailed_test_id disabled
    (
        False,
        [
            'z_ztest_unit_test__dummy_suite1_name__dummy_test_name1',
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'no match'
        ],
        [
            'dummy_suite1_name.dummy_name1',
            'dummy_suite2_name.dummy_name2'
        ]
    ),
    # case 3: 'demangle' - C++-mangled (_ZN...) ztest symbols are demangled
    (
        True,
        [
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'z_ztest_unit_test__bad_suite3_name_no_test',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name4E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_bad_name1E',
            '_ZN12_GLOBAL__N_1L51z_ztest_unit_test_dummy_suite3_name__test_bad_name2E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name5E',
            '_ZN15foobarnamespaceL54z_ztest_unit_test__dummy_suite3_name__test_dummy_name6E',
        ],
        [
           'dummy.test_id.dummy_suite2_name.dummy_name2',
           'dummy.test_id.dummy_suite3_name.dummy_name4',
           'dummy.test_id.dummy_suite3_name.bad_name1E',
           'dummy.test_id.dummy_suite3_name.dummy_name5',
           'dummy.test_id.dummy_suite3_name.dummy_name6',
        ]
    ),
    # case 4: 'demangle short id' - same symbols, detailed_test_id enabled
    # but short case names expected
    (
        True,
        [
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'z_ztest_unit_test__bad_suite3_name_no_test',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name4E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_bad_name1E',
            '_ZN12_GLOBAL__N_1L51z_ztest_unit_test_dummy_suite3_name__test_bad_name2E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name5E',
            '_ZN15foobarnamespaceL54z_ztest_unit_test__dummy_suite3_name__test_dummy_name6E',
        ],
        [
           'dummy_suite2_name.dummy_name2',
           'dummy_suite3_name.dummy_name4',
           'dummy_suite3_name.bad_name1E',
           'dummy_suite3_name.dummy_name5',
           'dummy_suite3_name.dummy_name6',
        ]
    ),
    # case 5: 'nothing' - no matching symbols, no test cases added
    (
        True,
        ['no match'],
        []
    ),
]
1632
@pytest.mark.parametrize(
    'detailed_id, symbols_names, added_tcs',
    TESTDATA_7,
    ids=['two hits, one miss', 'two hits short id', 'demangle', 'demangle short id', 'nothing']
)
def test_projectbuilder_determine_testcases(
    mocked_jobserver,
    mocked_env,
    detailed_id,
    symbols_names,
    added_tcs
):
    """determine_testcases() should read ztest symbols out of the ELF symbol
    table and register the corresponding test cases on both the instance and
    its testsuite."""
    # Mock() treats the 'name' kwarg specially, so the symbol name has to be
    # applied via configure_mock() after construction.
    symbols = []
    for symbol_name in symbols_names:
        symbol = mock.Mock()
        symbol.configure_mock(name=symbol_name)
        symbols.append(symbol)

    symtab = mock.Mock(spec=SymbolTableSection)
    symtab.iter_symbols = mock.Mock(return_value=symbols)

    elffile_mock = mock.Mock()
    elffile_mock().iter_sections = mock.Mock(return_value=[symtab])

    mock_results = mock.Mock()

    mock_instance = mock.Mock()
    mock_instance.testcases = []
    mock_instance.testsuite.id = 'dummy.test_id'
    mock_instance.testsuite.ztest_suite_names = []
    mock_instance.testsuite.detailed_test_id = detailed_id
    mock_instance.compose_case_name = mock.Mock(side_effect=iter(added_tcs))

    builder = ProjectBuilder(mock_instance, mocked_env, mocked_jobserver)

    with mock.patch('twisterlib.runner.ELFFile', elffile_mock), \
         mock.patch('builtins.open', mock.mock_open()):
        builder.determine_testcases(mock_results)

    expected_calls = [mock.call(name=tc_name) for tc_name in added_tcs]
    builder.instance.add_testcase.assert_has_calls(expected_calls)
    builder.instance.testsuite.add_testcase.assert_has_calls(expected_calls)
1676
1677
# Parameters for test_projectbuilder_cleanup_artifacts:
# (additional_keep, runtime_artifact_cleanup, expected_files)
TESTDATA_8 = [
    # case 1: 'additional keep' - an explicitly kept file survives the cleanup
    (
        ['addition.al'],
        'dummy',
        ['addition.al', '.config', 'zephyr']
    ),
    # case 2: 'all cleanup' - 'all' mode also keeps the twister dir contents
    (
        [],
        'all',
        ['.config', 'zephyr', 'testsuite_extra.conf', 'twister']
    ),
]
1690
@pytest.mark.parametrize(
    'additional_keep, runtime_artifact_cleanup, expected_files',
    TESTDATA_8,
    ids=['additional keep', 'all cleanup']
)
def test_projectbuilder_cleanup_artifacts(
    tmpdir,
    mocked_jobserver,
    additional_keep,
    runtime_artifact_cleanup,
    expected_files
):
    """cleanup_artifacts() should delete everything under the build dir
    except the keep list (and whatever the cleanup mode preserves)."""
    # Lay out the following tree under tmpdir:
    #   twister/testsuite_extra.conf
    #   dummy_dir/dummy.del
    #   dummy_link_dir -> zephyr
    #   zephyr/.config
    #   addition.al
    twister_dir = tmpdir.mkdir('twister')
    twister_dir.join('testsuite_extra.conf').write_text('dummy', 'utf-8')

    dummy_dir = tmpdir.mkdir('dummy_dir')
    dummy_dir.join('dummy.del').write_text('dummy', 'utf-8')

    zephyr_dir = tmpdir.mkdir('zephyr')
    zephyr_dir.join('.config').write_text('dummy', 'utf-8')

    # Symlink into the build tree; cleanup must not follow it.
    os.symlink(zephyr_dir, tmpdir.join('dummy_link_dir'))

    tmpdir.join('addition.al').write_text('dummy', 'utf-8')

    mock_instance = mock.Mock()
    mock_instance.build_dir = tmpdir
    mock_env = mock.Mock()

    builder = ProjectBuilder(mock_instance, mock_env, mocked_jobserver)
    builder.options = mock.Mock(runtime_artifact_cleanup=runtime_artifact_cleanup)

    builder.cleanup_artifacts(additional_keep)

    remaining = [entry.name for entry in pathlib.Path(tmpdir).glob('**/*')]

    assert sorted(remaining) == sorted(expected_files)
1742
1743
def test_projectbuilder_cleanup_device_testing_artifacts(
    caplog,
    mocked_jobserver
):
    """For device testing, cleanup should log the build dir, keep the
    discovered binaries plus runners.yaml, and sanitize the leftovers."""
    binaries = [os.path.join('zephyr', 'file.bin')]
    build_directory = os.path.join('build', 'dir')

    mock_instance = mock.Mock()
    mock_instance.sysbuild = False
    mock_instance.build_dir = build_directory
    mock_env = mock.Mock()

    builder = ProjectBuilder(mock_instance, mock_env, mocked_jobserver)
    builder._get_binaries = mock.Mock(return_value=binaries)
    builder.cleanup_artifacts = mock.Mock()
    builder._sanitize_files = mock.Mock()

    builder.cleanup_device_testing_artifacts()

    assert f'Cleaning up for Device Testing {build_directory}' in caplog.text

    expected_keep = [
        os.path.join('zephyr', 'file.bin'),
        os.path.join('zephyr', 'runners.yaml'),
    ]
    builder.cleanup_artifacts.assert_called_once_with(expected_keep)
    builder._sanitize_files.assert_called_once()
1770
1771
# Parameters for test_projectbuilder_get_binaries:
# (platform_binaries, runner_binaries, expected_binaries)
TESTDATA_9 = [
    # case 1: 'default' - no binaries declared anywhere, fall back to the
    # standard zephyr.{hex,bin,elf,exe} set
    (
        None,
        [],
        [os.path.join('zephyr', 'zephyr.hex'),
         os.path.join('zephyr', 'zephyr.bin'),
         os.path.join('zephyr', 'zephyr.elf'),
         os.path.join('zephyr', 'zephyr.exe')]
    ),
    # case 2: 'valid' - platform binaries get the zephyr/ prefix, runner
    # binaries are used as-is
    (
        [os.path.join('dummy.bin'), os.path.join('dummy.hex')],
        [os.path.join('dir2', 'dummy.elf')],
        [os.path.join('zephyr', 'dummy.bin'),
         os.path.join('zephyr', 'dummy.hex'),
         os.path.join('dir2', 'dummy.elf')]
    ),
]
1789
@pytest.mark.parametrize(
    'platform_binaries, runner_binaries, expected_binaries',
    TESTDATA_9,
    ids=['default', 'valid']
)
def test_projectbuilder_get_binaries(
    mocked_jobserver,
    platform_binaries,
    runner_binaries,
    expected_binaries
):
    """_get_binaries() should merge platform-declared binaries with the ones
    listed in runners.yaml, falling back to the default zephyr.* set when the
    platform declares none (see TESTDATA_9)."""
    def mock_get_domains(*args, **kwargs):
        # No sysbuild domains: only the top-level build dir is consulted.
        return []

    instance_mock = mock.Mock()
    instance_mock.build_dir = os.path.join('build', 'dir')
    instance_mock.domains.get_domains.side_effect = mock_get_domains
    instance_mock.platform = mock.Mock()
    instance_mock.platform.binaries = platform_binaries
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb._get_binaries_from_runners = mock.Mock(return_value=runner_binaries)

    bins = pb._get_binaries()

    # Set equality replaces the previous pair of all(... in ...) membership
    # scans (order-insensitive comparison) and avoids shadowing builtin bin().
    assert set(bins) == set(expected_binaries)
1818
1819
# Parameters for test_projectbuilder_get_binaries_from_runners:
# (domain, runners_content, expected_binaries)
TESTDATA_10 = [
    # case 1: 'no file' - runners.yaml does not exist
    (None, None, []),
    # case 2: 'no config' - runners.yaml has no 'config' section
    (None, {'dummy': 'dummy'}, []),
    # case 3: 'valid' - absolute paths kept, relative paths get zephyr/ prefix
    (   None,
        {
            'config': {
                'elf_file': '/absolute/path/dummy.elf',
                'bin_file': 'path/dummy.bin'
            }
        },
        ['/absolute/path/dummy.elf', os.path.join('zephyr', 'path/dummy.bin')]
    ),
    # case 4: 'with domain' - relative paths are rooted in the domain dir
    (   'test_domain',
        {
            'config': {
                'elf_file': '/absolute/path/dummy.elf',
                'bin_file': 'path/dummy.bin'
            }
        },
        ['/absolute/path/dummy.elf', os.path.join('test_domain', 'zephyr', 'path/dummy.bin')]
    ),
]
1842
@pytest.mark.parametrize(
    'domain, runners_content, expected_binaries',
    TESTDATA_10,
    ids=['no file', 'no config', 'valid', 'with domain']
)
def test_projectbuilder_get_binaries_from_runners(
    mocked_jobserver,
    domain,
    runners_content,
    expected_binaries
):
    """_get_binaries_from_runners() should read binary paths out of
    runners.yaml in the (optionally domain-prefixed) build directory
    (see TESTDATA_10)."""
    def mock_exists(fname):
        # The method must only ever probe runners.yaml in the build dir.
        assert fname == os.path.join('build', 'dir', domain if domain else '',
                                     'zephyr', 'runners.yaml')
        return runners_content is not None

    instance_mock = mock.Mock()
    instance_mock.build_dir = os.path.join('build', 'dir')
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    with mock.patch('os.path.exists', mock_exists), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('yaml.load', return_value=runners_content):
        if domain:
            bins = pb._get_binaries_from_runners(domain)
        else:
            bins = pb._get_binaries_from_runners()

    # Set equality replaces the previous pair of all(... in ...) membership
    # scans (order-insensitive comparison) and avoids shadowing builtin bin().
    assert set(bins) == set(expected_binaries)
1875
1876
def test_projectbuilder_sanitize_files(mocked_jobserver):
    """_sanitize_files() should delegate to both sanitizing helpers once."""
    builder = ProjectBuilder(mock.Mock(), mock.Mock(), mocked_jobserver)
    builder._sanitize_runners_file = mock.Mock()
    builder._sanitize_zephyr_base_from_files = mock.Mock()

    builder._sanitize_files()

    builder._sanitize_runners_file.assert_called_once()
    builder._sanitize_zephyr_base_from_files.assert_called_once()
1889
1890
1891
# Parameters for test_projectbuilder_sanitize_runners_file:
# (runners_text, expected_write_text) - None runners_text means no file,
# None expected_write_text means no rewrite must happen.
TESTDATA_11 = [
    # case 1: 'no file'
    (None, None),
    # case 2: 'no config' - nothing to sanitize, file left untouched
    ('dummy: []', None),
    # case 3: 'valid' - absolute paths under the build dir become relative
    (
"""
config:
  elf_file: relative/path/dummy.elf
  hex_file: /absolute/path/build_dir/zephyr/dummy.hex
""",
"""
config:
  elf_file: relative/path/dummy.elf
  hex_file: dummy.hex
"""
    ),
]
1908
@pytest.mark.parametrize(
    'runners_text, expected_write_text',
    TESTDATA_11,
    ids=['no file', 'no config', 'valid']
)
def test_projectbuilder_sanitize_runners_file(
    mocked_jobserver,
    runners_text,
    expected_write_text
):
    """_sanitize_runners_file() should rewrite runners.yaml with build-dir
    paths made relative, and leave it alone when there is nothing to fix."""
    file_exists = runners_text is not None

    mock_instance = mock.Mock()
    mock_instance.build_dir = '/absolute/path/build_dir'
    mock_env = mock.Mock()

    builder = ProjectBuilder(mock_instance, mock_env, mocked_jobserver)

    opener = mock.mock_open(read_data=runners_text)
    with mock.patch('os.path.exists', lambda fname: file_exists), \
         mock.patch('builtins.open', opener):
        builder._sanitize_runners_file()

    if expected_write_text is None:
        opener().write.assert_not_called()
    else:
        opener().write.assert_called_with(expected_write_text)
1937
1938
# Parameters for test_projectbuilder_sanitize_zephyr_base_from_files:
# (text_mocks, expected_write_texts) - maps a build-dir-relative filename to
# a mock_open with the file's original text, and to the text expected to be
# written back once the canonical ZEPHYR_BASE prefix is stripped.
TESTDATA_12 = [
    # case 1: 'CMakeCache file'
    (
        {
            'CMakeCache.txt': mock.mock_open(
                read_data='canonical/zephyr/base/dummy.file: ERROR'
            )
        },
        {
            'CMakeCache.txt': 'dummy.file: ERROR'
        }
    ),
    # case 2: 'runners.yaml file'
    (
        {
            os.path.join('zephyr', 'runners.yaml'): mock.mock_open(
                read_data='There was canonical/zephyr/base/dummy.file here'
            )
        },
        {
            os.path.join('zephyr', 'runners.yaml'): 'There was dummy.file here'
        }
    ),
]
1961
@pytest.mark.parametrize(
    'text_mocks, expected_write_texts',
    TESTDATA_12,
    ids=['CMakeCache file', 'runners.yaml file']
)
def test_projectbuilder_sanitize_zephyr_base_from_files(
    mocked_jobserver,
    text_mocks,
    expected_write_texts
):
    """_sanitize_zephyr_base_from_files() should strip the canonical
    ZEPHYR_BASE prefix from the tracked build artifacts and write the
    sanitized text back."""
    build_dir_path = 'canonical/zephyr/base/build_dir/'

    def fake_exists(fname):
        # Only the mocked files inside the build dir exist.
        if not fname.startswith(build_dir_path):
            return False
        return fname[len(build_dir_path):] in text_mocks

    def fake_open(fname, *args, **kwargs):
        if not fname.startswith(build_dir_path):
            raise FileNotFoundError(errno.ENOENT, f'File {fname} not found.')
        return text_mocks[fname[len(build_dir_path):]]()

    mock_instance = mock.Mock()
    mock_instance.build_dir = build_dir_path
    mock_env = mock.Mock()

    builder = ProjectBuilder(mock_instance, mock_env, mocked_jobserver)

    with mock.patch('os.path.exists', fake_exists), \
         mock.patch('builtins.open', fake_open), \
         mock.patch('twisterlib.runner.canonical_zephyr_base',
                    'canonical/zephyr/base'):
        builder._sanitize_zephyr_base_from_files()

    for fname, file_mock in text_mocks.items():
        file_mock().write.assert_called_with(expected_write_texts[fname])
1998
1999
# Each entry: (status, verbose, cmake_only, ready_run, expected_logs,
#              expected_out) — the instance status plus option flags, the log
# fragments expected in caplog, and the stdout progress-line fragment
# (None when no stdout summary is expected for that case).
TESTDATA_13 = [
    (
        TwisterStatus.ERROR, True, True, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               ERROR dummy reason (cmake)'],
        None
    ),
    (
        TwisterStatus.FAIL, False, False, False,
        ['ERROR     dummy platform' \
         '            dummy.testsuite.name' \
         '                               FAILED: dummy reason'],
        'INFO    - Total complete:   20/  25  80%' \
        '  built (not run):    0, filtered:    3, failed:    3, error:    1'
    ),
    (
        TwisterStatus.SKIP, True, False, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               SKIPPED (dummy reason)'],
        None
    ),
    (
        TwisterStatus.FILTER, False, False, False,
        [],
        'INFO    - Total complete:   20/  25  80%' \
        '  built (not run):    0, filtered:    4, failed:    2, error:    1'
    ),
    (
        TwisterStatus.PASS, True, False, True,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               PASSED' \
         ' (dummy handler type: dummy dut, 60.000s)'],
        None
    ),
    (
        TwisterStatus.PASS, True, False, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               PASSED (build)'],
        None
    ),
    (
        'unknown status', False, False, False,
        ['Unknown status = unknown status'],
        'INFO    - Total complete:   20/  25  80%'
        '  built (not run):    0, filtered:    3, failed:    2, error:    1\r'
    )
]
2051
@pytest.mark.parametrize(
    'status, verbose, cmake_only, ready_run, expected_logs, expected_out',
    TESTDATA_13,
    ids=['verbose error cmake only', 'failed', 'verbose skipped', 'filtered',
         'verbose passed ready run', 'verbose passed', 'unknown status']
)
def test_projectbuilder_report_out(
    capfd,
    caplog,
    mocked_jobserver,
    status,
    verbose,
    cmake_only,
    ready_run,
    expected_logs,
    expected_out
):
    """report_out() should emit the expected per-instance log lines and, for
    some statuses, a one-line progress summary on stdout, while updating the
    shared result counters via their *_increment helpers."""
    instance_mock = mock.Mock()
    instance_mock.handler.type_str = 'dummy handler type'
    instance_mock.handler.seed = 123
    instance_mock.handler.ready = ready_run
    instance_mock.run = ready_run
    instance_mock.dut = 'dummy dut'
    instance_mock.execution_time = 60
    instance_mock.platform.name = 'dummy platform'
    instance_mock.status = status
    instance_mock.reason = 'dummy reason'
    instance_mock.testsuite.name = 'dummy.testsuite.name'
    # One skipped testcase plus 24 with an unrecognized status; all 25
    # testsuite cases are expected to be counted via cases_increment below.
    skip_mock_tc = mock.Mock(status=TwisterStatus.SKIP, reason=None)
    skip_mock_tc.name = 'mocked_testcase_to_skip'
    unknown_mock_tc = mock.Mock(status=mock.Mock(value='dummystatus'), reason=None)
    unknown_mock_tc.name = 'mocked_testcase_unknown'
    instance_mock.testsuite.testcases = [unknown_mock_tc for _ in range(25)]
    instance_mock.testcases = [unknown_mock_tc for _ in range(24)] + \
                              [skip_mock_tc]
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options.verbose = verbose
    pb.options.cmake_only = cmake_only
    pb.options.seed = 123
    pb.log_info_file = mock.Mock()

    # Counter snapshot standing in for ExecutionCounter state before this
    # instance is reported (19 done out of 25 total).
    results_mock = mock.Mock(
        total = 25,
        done = 19,
        passed = 17,
        notrun = 0,
        failed = 2,
        filtered_configs = 3,
        filtered_runtime = 0,
        filtered_static = 0,
        error = 1,
        cases = 0,
        filtered_cases = 0,
        skipped_cases = 4,
        failed_cases = 0,
        error_cases = 0,
        blocked_cases = 0,
        passed_cases = 0,
        none_cases = 0,
        started_cases = 0
    )
    results_mock.iteration = 1
    # Re-create the *_increment helpers so that calls made by report_out()
    # actually mutate the counters on the mock.
    def results_done_increment(value=1, decrement=False):
        results_mock.done += value * (-1 if decrement else 1)
    results_mock.done_increment = results_done_increment
    def filtered_configs_increment(value=1, decrement=False):
        results_mock.filtered_configs += value * (-1 if decrement else 1)
    results_mock.filtered_configs_increment = filtered_configs_increment
    def filtered_static_increment(value=1, decrement=False):
        results_mock.filtered_static += value * (-1 if decrement else 1)
    results_mock.filtered_static_increment = filtered_static_increment
    def filtered_runtime_increment(value=1, decrement=False):
        results_mock.filtered_runtime += value * (-1 if decrement else 1)
    results_mock.filtered_runtime_increment = filtered_runtime_increment
    def failed_increment(value=1, decrement=False):
        results_mock.failed += value * (-1 if decrement else 1)
    results_mock.failed_increment = failed_increment
    def notrun_increment(value=1, decrement=False):
        results_mock.notrun += value * (-1 if decrement else 1)
    results_mock.notrun_increment = notrun_increment

    pb.report_out(results_mock)

    assert results_mock.cases_increment.call_args_list == [mock.call(25)]

    # Strip ANSI escape sequences (colours) and the volatile file:line
    # logging prefix before comparing against the expected fragments.
    trim_actual_log = re.sub(
        r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])',
        '',
        caplog.text
    )
    trim_actual_log = re.sub(r'twister:runner.py:\d+', '', trim_actual_log)

    assert all([log in trim_actual_log for log in expected_logs])

    print(trim_actual_log)
    if expected_out:
        out, err = capfd.readouterr()
        # Echo the captured streams so they stay visible in pytest's output.
        sys.stdout.write(out)
        sys.stderr.write(err)

        # Remove 7b ANSI C1 escape sequences (colours)
        out = re.sub(
            r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])',
            '',
            out
        )

        assert expected_out in out
2162
2163
def test_projectbuilder_cmake_assemble_args():
    """cmake_assemble_args() should fold testsuite, handler and CLI inputs
    into a single ordered list of -D CMake definitions."""
    handler = mock.Mock(ready=True, args=['dummy_handler'])
    build_dir = os.path.join('build', 'dir')

    with mock.patch('os.path.exists', return_value=True):
        results = ProjectBuilder.cmake_assemble_args(
            ['CONFIG_FOO=y', 'DUMMY_EXTRA="yes"'],
            handler,
            ['extrafile1.conf', 'extrafile2.conf'],
            ['extra_overlay_conf'],
            ['overlay1.dtc', 'overlay2.dtc'],
            ['CMAKE1="yes"', 'CMAKE2=n'],
            build_dir,
        )

    extra_conf_path = os.path.join('build', 'dir', 'twister',
                                   'testsuite_extra.conf')
    assert results == [
        '-DCONFIG_FOO=y',
        '-DCMAKE1="yes"',
        '-DCMAKE2=n',
        '-DDUMMY_EXTRA=yes',
        '-Ddummy_handler',
        '-DCONF_FILE=extrafile1.conf;extrafile2.conf',
        '-DDTC_OVERLAY_FILE=overlay1.dtc;overlay2.dtc',
        f'-DOVERLAY_CONFIG=extra_overlay_conf {extra_conf_path}',
    ]
2194
2195
def test_projectbuilder_cmake(mocked_jobserver):
    """cmake() should assemble the CMake argument list from testsuite and
    CLI options and pass it, together with the filter, to run_cmake().

    Fix: request the ``mocked_jobserver`` fixture as a parameter (as every
    sibling test does) instead of referencing the module-level fixture
    function object directly, which handed ProjectBuilder the undecorated
    fixture callable rather than the Mock jobserver.
    """
    instance_mock = mock.Mock()
    instance_mock.handler = 'dummy handler'
    instance_mock.build_dir = os.path.join('build', 'dir')
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.build_dir = 'build_dir'
    pb.testsuite.extra_args = ['some', 'args']
    pb.testsuite.extra_conf_files = ['some', 'files1']
    pb.testsuite.extra_overlay_confs = ['some', 'files2']
    pb.testsuite.extra_dtc_overlay_files = ['some', 'files3']
    pb.options.extra_args = ['other', 'args']
    pb.cmake_assemble_args = mock.Mock(return_value=['dummy'])
    cmake_res_mock = mock.Mock()
    pb.run_cmake = mock.Mock(return_value=cmake_res_mock)

    res = pb.cmake(['dummy filter'])

    assert res == cmake_res_mock
    # cmake() must forward exactly the testsuite/option values, unmodified.
    pb.cmake_assemble_args.assert_called_once_with(
        pb.testsuite.extra_args,
        pb.instance.handler,
        pb.testsuite.extra_conf_files,
        pb.testsuite.extra_overlay_confs,
        pb.testsuite.extra_dtc_overlay_files,
        pb.options.extra_args,
        pb.instance.build_dir
    )
    pb.run_cmake.assert_called_once_with(['dummy'], ['dummy filter'])
2226
2227
def test_projectbuilder_build(mocked_jobserver):
    """build() should delegate to run_build() with the --build argument and
    forward its return value unchanged."""
    env_mock = mock.Mock()
    instance_mock = mock.Mock()
    instance_mock.testsuite.harness = 'test'

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.build_dir = 'build_dir'
    expected = {'dummy': 'dummy'}
    pb.run_build = mock.Mock(return_value=expected)

    assert pb.build() == expected
    pb.run_build.assert_called_once_with(['--build', 'build_dir'])
2242
2243
# Each entry: (ready, type_str, seed, platform_name, platform_arch,
#              defconfig, harness, expect_duts, expect_parse_generated,
#              expect_seed, expect_extra_test_args, expect_pytest,
#              expect_handle) — handler/platform inputs followed by the
# expectation flags checked in test_projectbuilder_run().
TESTDATA_14 = [
    (
        True,
        'device',
        234,
        'native_sim',
        'posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_POSIX': 'y'},
        'pytest',
        True,
        True,
        True,
        True,
        True,
        False
    ),
    (
        True,
        'not device',
        None,
        'native_sim',
        'not posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_POSIX': 'y'},
        'not pytest',
        False,
        False,
        False,
        False,
        False,
        True
    ),
    (
        False,
        'device',
        234,
        'native_sim',
        'posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_POSIX': 'y'},
        'pytest',
        False,
        False,
        False,
        False,
        False,
        False
    ),
]
2291
@pytest.mark.parametrize(
    'ready, type_str, seed, platform_name, platform_arch, defconfig, harness,' \
    ' expect_duts, expect_parse_generated, expect_seed,' \
    ' expect_extra_test_args, expect_pytest, expect_handle',
    TESTDATA_14,
    ids=['pytest full', 'not pytest minimal', 'not ready']
)
def test_projectbuilder_run(
    mocked_jobserver,
    ready,
    type_str,
    seed,
    platform_name,
    platform_arch,
    defconfig,
    harness,
    expect_duts,
    expect_parse_generated,
    expect_seed,
    expect_extra_test_args,
    expect_pytest,
    expect_handle
):
    """run() should configure the handler (duts, seed, extra test args),
    parse generated files, and dispatch either to the Pytest harness or to
    handler.handle(), depending on handler readiness and harness type."""
    pytest_mock = mock.Mock(spec=Pytest)
    harness_mock = mock.Mock()

    # HarnessImporter.get_harness replacement: hand back the Pytest mock
    # only when the Pytest harness is requested by name.
    def mock_harness(name):
        if name == 'Pytest':
            return pytest_mock
        else:
            return harness_mock

    instance_mock = mock.Mock()
    instance_mock.handler.get_test_timeout = mock.Mock(return_value=60)
    instance_mock.handler.seed = 123
    instance_mock.handler.ready = ready
    instance_mock.handler.type_str = type_str
    instance_mock.handler.duts = [mock.Mock(name='dummy dut')]
    instance_mock.platform.name = platform_name
    instance_mock.platform.arch = platform_arch
    instance_mock.testsuite.harness = harness
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options.extra_test_args = ['dummy_arg1', 'dummy_arg2']
    pb.duts = ['another dut']
    pb.options.seed = seed
    pb.defconfig = defconfig
    pb.parse_generated = mock.Mock()

    with mock.patch('twisterlib.runner.HarnessImporter.get_harness',
                    mock_harness):
        pb.run()

    # Each expectation flag comes straight from the TESTDATA_14 row.
    if expect_duts:
        assert pb.instance.handler.duts == ['another dut']

    if expect_parse_generated:
        pb.parse_generated.assert_called_once()

    if expect_seed:
        assert pb.instance.handler.seed == seed

    if expect_extra_test_args:
        assert pb.instance.handler.extra_test_args == ['dummy_arg1',
                                                       'dummy_arg2']

    if expect_pytest:
        pytest_mock.pytest_run.assert_called_once_with(60)

    if expect_handle:
        pb.instance.handler.handle.assert_called_once_with(harness_mock)
2364
2365
# Each entry: (enable_size_report, cmake_only, expect_calc_size,
#              expect_zeroes) — option flags and which outcome
# gather_metrics() should take.
TESTDATA_15 = [
    (False, False, False, True),
    (True, False, True, False),
    (False, True, False, True),
    (True, True, False, True),
]
2372
@pytest.mark.parametrize(
    'enable_size_report, cmake_only, expect_calc_size, expect_zeroes',
    TESTDATA_15,
    ids=['none', 'size_report', 'cmake', 'size_report+cmake']
)
def test_projectbuilder_gather_metrics(
    mocked_jobserver,
    enable_size_report,
    cmake_only,
    expect_calc_size,
    expect_zeroes
):
    """gather_metrics() should either invoke calc_size() or zero out every
    size-related metric, depending on the size-report/cmake-only options."""
    env_mock = mock.Mock()
    instance_mock = mock.Mock()
    instance_mock.metrics = {}

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options.enable_size_report = enable_size_report
    pb.options.create_rom_ram_report = False
    pb.options.cmake_only = cmake_only
    pb.calc_size = mock.Mock()

    pb.gather_metrics(instance_mock)

    if expect_calc_size:
        pb.calc_size.assert_called_once()

    if expect_zeroes:
        for key in ('used_ram', 'used_rom', 'available_rom', 'available_ram'):
            assert instance_mock.metrics[key] == 0
        assert instance_mock.metrics['unrecognized'] == []
2406
2407
# Each entry: (status, platform_type, expect_warnings, expect_calcs,
#              expect_zeroes) — instance status and platform type, followed
# by what calc_size() is expected to do with the metrics.
TESTDATA_16 = [
    (TwisterStatus.ERROR, mock.ANY, False, False, False),
    (TwisterStatus.FAIL, mock.ANY, False, False, False),
    (TwisterStatus.SKIP, mock.ANY, False, False, False),
    (TwisterStatus.FILTER, 'native', False, False, True),
    (TwisterStatus.PASS, 'qemu', False, False, True),
    (TwisterStatus.FILTER, 'unit', False, False, True),
    (TwisterStatus.FILTER, 'mcu', True, True, False),
    (TwisterStatus.PASS, 'frdm_k64f', False, True, False),
]
2418
@pytest.mark.parametrize(
    'status, platform_type, expect_warnings, expect_calcs, expect_zeroes',
    TESTDATA_16,
    ids=[x[0] + (', ' + x[1]) if x[1] != mock.ANY else '' for x in TESTDATA_16]
)
def test_projectbuilder_calc_size(
    status,
    platform_type,
    expect_warnings,
    expect_calcs,
    expect_zeroes
):
    """calc_size() should compute the size metrics for real hardware
    platform types, zero them for simulated ones, and otherwise leave the
    metrics dict untouched."""
    size_calc_mock = mock.Mock()

    instance_mock = mock.Mock()
    instance_mock.status = status
    instance_mock.platform.type = platform_type
    instance_mock.metrics = {}
    instance_mock.calculate_sizes = mock.Mock(return_value=size_calc_mock)

    ProjectBuilder.calc_size(instance_mock, True)

    metrics = instance_mock.metrics

    if expect_calcs:
        instance_mock.calculate_sizes.assert_called_once_with(
            from_buildlog=True,
            generate_warning=expect_warnings
        )

        assert metrics['used_ram'] == size_calc_mock.get_used_ram()
        assert metrics['used_rom'] == size_calc_mock.get_used_rom()
        assert metrics['available_rom'] == size_calc_mock.get_available_rom()
        assert metrics['available_ram'] == size_calc_mock.get_available_ram()
        assert metrics['unrecognized'] == \
               size_calc_mock.unrecognized_sections()

    if expect_zeroes:
        for key in ('used_ram', 'used_rom', 'available_rom', 'available_ram'):
            assert metrics[key] == 0
        assert metrics['unrecognized'] == []

    if expect_calcs or expect_zeroes:
        assert metrics['handler_time'] == instance_mock.execution_time
    else:
        assert metrics == {}
2472
2473
# Each entry: (platform, os_name, options, jobclient_from_environ,
#              expected_jobs, expected_jobserver) — the sys.platform/os.name
# environment, option overrides, whether GNUMakeJobClient.from_environ
# yields a client, and the expected job count and jobserver class name.
TESTDATA_17 = [
    ('linux', 'posix', {'jobs': 4}, True, 32, 'GNUMakeJobClient'),
    ('linux', 'posix', {'build_only': True}, False, 16, 'GNUMakeJobServer'),
    ('linux', '???', {}, False, 8, 'JobClient'),
    ('linux', '???', {'jobs': 4}, False, 4, 'JobClient'),
]
2480
@pytest.mark.parametrize(
    'platform, os_name, options, jobclient_from_environ, expected_jobs,' \
    ' expected_jobserver',
    TESTDATA_17,
    ids=['GNUMakeJobClient', 'GNUMakeJobServer',
         'JobClient', 'Jobclient+options']
)
def test_twisterrunner_run(
    caplog,
    platform,
    os_name,
    options,
    jobclient_from_environ,
    expected_jobs,
    expected_jobserver
):
    """TwisterRunner.run() should pick the right jobserver and job count for
    the environment, merge metrics from finished instances back into its
    instance map, and reset the error counter after the retry loop."""
    # Stand-in for GNUMakeJobClient.from_environ: either a 32-job client or
    # None, forcing the fallback jobserver selection paths.
    def mock_client_from_environ(jobs):
        if jobclient_from_environ:
            jobclient_mock = mock.Mock(jobs=32)
            jobclient_mock.name = 'GNUMakeJobClient'
            return jobclient_mock
        return None

    instances = {'dummy instance': mock.Mock(metrics={'k': 'v'})}
    suites = [mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.options.retry_failed = 2
    tr.options.retry_interval = 10
    tr.options.retry_build_errors = True
    tr.options.jobs = None
    tr.options.build_only = None
    # Apply the per-case option overrides on top of the defaults above.
    for k, v in options.items():
        setattr(tr.options, k, v)
    tr.update_counting_before_pipeline = mock.Mock()
    tr.execute = mock.Mock()
    tr.show_brief = mock.Mock()

    gnumakejobserver_mock = mock.Mock()
    gnumakejobserver_mock().name='GNUMakeJobServer'
    jobclient_mock = mock.Mock()
    jobclient_mock().name='JobClient'

    # The BaseManager mock hands out these two queues: an empty pipeline and
    # a done-queue holding one finished instance whose metrics must be
    # merged into the matching entry of tr.instances.
    pipeline_q = queue.LifoQueue()
    done_q = queue.LifoQueue()
    done_instance = mock.Mock(
        metrics={'k2': 'v2'},
        execution_time=30
    )
    done_instance.name='dummy instance'
    done_q.put(done_instance)
    manager_mock = mock.Mock()
    manager_mock().LifoQueue = mock.Mock(
        side_effect=iter([pipeline_q, done_q])
    )

    results_mock = mock.Mock()
    results_mock().error = 1
    results_mock().iteration = 0
    results_mock().failed = 2
    results_mock().total = 9

    def iteration_increment(value=1, decrement=False):
        results_mock().iteration += value * (-1 if decrement else 1)
    results_mock().iteration_increment = iteration_increment

    with mock.patch('twisterlib.runner.ExecutionCounter', results_mock), \
         mock.patch('twisterlib.runner.BaseManager', manager_mock), \
         mock.patch('twisterlib.runner.GNUMakeJobClient.from_environ',
                    mock_client_from_environ), \
         mock.patch('twisterlib.runner.GNUMakeJobServer',
                    gnumakejobserver_mock), \
         mock.patch('twisterlib.runner.JobClient', jobclient_mock), \
         mock.patch('multiprocessing.cpu_count', return_value=8), \
         mock.patch('sys.platform', platform), \
         mock.patch('time.sleep', mock.Mock()), \
         mock.patch('os.name', os_name):
        tr.run()

    assert f'JOBS: {expected_jobs}' in caplog.text

    assert tr.jobserver.name == expected_jobserver

    # Metrics of the done instance were merged into the original instance.
    assert tr.instances['dummy instance'].metrics == {
        'k': 'v',
        'k2': 'v2',
        'handler_time': 30,
        'unrecognized': []
    }

    assert results_mock().error == 0
2573
2574
def test_twisterrunner_update_counting_before_pipeline():
    """update_counting_before_pipeline() should count statically filtered
    and errored instances (and their testcases) before the pipeline starts,
    while ignoring runtime-filtered, passed and skipped instances.

    Improvement: the five copy-pasted `*_increment` closures are replaced
    with a single factory so every counter helper shares one implementation.
    """
    instances = {
        'dummy1': mock.Mock(
            status=TwisterStatus.FILTER,
            reason='runtime filter',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy2': mock.Mock(
            status=TwisterStatus.FILTER,
            reason='static filter',
            testsuite=mock.Mock(
                testcases=[mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock()]
            )
        ),
        'dummy3': mock.Mock(
            status=TwisterStatus.ERROR,
            reason='error',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy4': mock.Mock(
            status=TwisterStatus.PASS,
            reason='OK',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy5': mock.Mock(
            status=TwisterStatus.SKIP,
            reason=None,
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        )
    }
    suites = [mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.results = mock.Mock(
        total = 0,
        done = 0,
        passed = 0,
        failed = 0,
        filtered_configs = 0,
        filtered_runtime = 0,
        filtered_static = 0,
        error = 0,
        cases = 0,
        filtered_cases = 0,
        skipped_cases = 0,
        failed_cases = 0,
        error_cases = 0,
        blocked_cases = 0,
        passed_cases = 0,
        none_cases = 0,
        started_cases = 0
    )

    def _make_increment(attr):
        # Mimic an ExecutionCounter *_increment helper by adjusting the
        # backing attribute on the results mock in place.
        def _increment(value=1, decrement=False):
            current = getattr(tr.results, attr)
            setattr(tr.results, attr,
                    current + value * (-1 if decrement else 1))
        return _increment

    for counter in ('filtered_configs', 'filtered_static', 'error',
                    'cases', 'filtered_cases'):
        setattr(tr.results, f'{counter}_increment', _make_increment(counter))

    tr.update_counting_before_pipeline()

    # Only dummy2 (static filter) and dummy3 (error) are counted here.
    assert tr.results.filtered_static == 1
    assert tr.results.filtered_configs == 1
    assert tr.results.filtered_cases == 4
    assert tr.results.cases == 4
    assert tr.results.error == 1
2659
2660
def test_twisterrunner_show_brief(caplog):
    """show_brief() should log a one-line summary of selected scenarios,
    configurations and filtering statistics."""
    instances = {f'dummy{i}': mock.Mock() for i in range(1, 6)}
    suites = [mock.Mock(), mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.results = mock.Mock(
        filtered_static = 3,
        filtered_configs = 4,
        skipped_cases = 0,
        cases = 0,
        error = 0
    )

    tr.show_brief()

    expected = (
        '2 test scenarios (5 configurations) selected,'
        ' 4 configurations filtered (3 by static filter, 1 at runtime).'
    )
    assert expected in caplog.text
2687
2688
# Each entry: (build_only, test_only, retry_build_errors,
#              expected_pipeline_elements) — the flags passed to
# add_tasks_to_queue() and the ops expected to be put on the pipeline.
TESTDATA_18 = [
    (False, False, False, [{'op': 'cmake', 'test': mock.ANY}]),
    (False, False, True, [{'op': 'filter', 'test': mock.ANY},
                          {'op': 'cmake', 'test': mock.ANY}]),
    (False, True, True, [{'op': 'run', 'test': mock.ANY},
                         {'op': 'run', 'test': mock.ANY}]),
    (False, True, False, [{'op': 'run', 'test': mock.ANY}]),
    (True, True, False, [{'op': 'cmake', 'test': mock.ANY}]),
    (True, True, True, [{'op': 'filter', 'test': mock.ANY},
                        {'op': 'cmake', 'test': mock.ANY}]),
    (True, False, True, [{'op': 'filter', 'test': mock.ANY},
                         {'op': 'cmake', 'test': mock.ANY}]),
    (True, False, False, [{'op': 'cmake', 'test': mock.ANY}]),
]
2703
@pytest.mark.parametrize(
    'build_only, test_only, retry_build_errors, expected_pipeline_elements',
    TESTDATA_18,
    ids=['none', 'retry', 'test+retry', 'test', 'build+test',
         'build+test+retry', 'build+retry', 'build']
)
def test_twisterrunner_add_tasks_to_queue(
    build_only,
    test_only,
    retry_build_errors,
    expected_pipeline_elements
):
    """add_tasks_to_queue() should enqueue the appropriate first operation
    for each instance, depending on its status and the build/test flags."""
    def fake_filter_stages(filter_expr, keys):
        return [filter_expr]

    def make_instance(status):
        return mock.Mock(run=True, retries=0, status=status, build_dir="/tmp")

    instances = {
        'dummy1': make_instance(TwisterStatus.PASS),
        'dummy2': make_instance(TwisterStatus.SKIP),
        'dummy3': make_instance(TwisterStatus.FILTER),
        'dummy4': make_instance(TwisterStatus.ERROR),
        'dummy5': make_instance(TwisterStatus.FAIL)
    }
    instances['dummy4'].testsuite.filter = 'some'
    instances['dummy5'].testsuite.filter = 'full'

    tr = TwisterRunner(instances, [mock.Mock(), mock.Mock()], env=mock.Mock())
    tr.get_cmake_filter_stages = mock.Mock(side_effect=fake_filter_stages)
    tr.results = mock.Mock(iteration=0)

    pipeline_mock = mock.Mock()

    tr.add_tasks_to_queue(
        pipeline_mock,
        build_only,
        test_only,
        retry_build_errors
    )

    # Every instance's run flag must be flipped to "not build_only".
    assert all(
        [build_only != instance.run for instance in instances.values()]
    )

    tr.get_cmake_filter_stages.assert_any_call('full', mock.ANY)
    if retry_build_errors:
        tr.get_cmake_filter_stages.assert_any_call('some', mock.ANY)

    print(pipeline_mock.put.call_args_list)
    print([mock.call(el) for el in expected_pipeline_elements])

    expected_calls = [mock.call(el) for el in expected_pipeline_elements]
    assert pipeline_mock.put.call_args_list == expected_calls
2759
2760
# sys.platform values for pipeline_mgr(); the parentheses are grouping, not
# tuples — each entry is a single string parameter.
TESTDATA_19 = [
    ('linux'),
    ('nt')
]
2765
@pytest.mark.parametrize(
    'platform',
    TESTDATA_19,
)
def test_twisterrunner_pipeline_mgr(mocked_jobserver, platform):
    """pipeline_mgr() should drain the pipeline queue, handing each task to
    a ProjectBuilder, and acquire a jobserver slot on Linux only."""
    tasks = iter([{'test': 'dummy'}] * 5)

    def mock_get_nowait():
        # Serve five tasks, then signal an empty queue.
        try:
            return next(tasks)
        except StopIteration:
            raise queue.Empty()

    env_mock = mock.Mock()
    tr = TwisterRunner({}, [], env=env_mock)
    tr.jobserver = mock.Mock(
        get_job=mock.Mock(return_value=nullcontext())
    )

    pipeline_mock = mock.Mock(
        get_nowait=mock.Mock(side_effect=mock_get_nowait)
    )
    done_queue_mock = mock.Mock()
    lock_mock = mock.Mock()
    results_mock = mock.Mock()

    with mock.patch('sys.platform', platform), \
         mock.patch('twisterlib.runner.ProjectBuilder',\
                    return_value=mock.Mock()) as pb:
        tr.pipeline_mgr(pipeline_mock, done_queue_mock, lock_mock, results_mock)

    assert len(pb().process.call_args_list) == 5

    if platform == 'linux':
        tr.jobserver.get_job.assert_called_once()
2805
2806
def test_twisterrunner_execute(caplog):
    """execute() should spawn one worker per job, log and survive a
    KeyboardInterrupt raised while joining, and terminate every worker."""
    join_calls = 0

    def mock_join():
        # Let three joins succeed, then simulate a user interrupt.
        nonlocal join_calls
        join_calls += 1
        if join_calls > 3:
            raise KeyboardInterrupt()

    env_mock = mock.Mock()
    tr = TwisterRunner({}, [], env=env_mock)
    tr.add_tasks_to_queue = mock.Mock()
    tr.jobs = 5

    process_mock = mock.Mock()
    process_mock().join = mock.Mock(side_effect=mock_join)
    process_mock().exitcode = 0

    with mock.patch('twisterlib.runner.Process', process_mock):
        tr.execute(mock.Mock(), mock.Mock())

    assert 'Execution interrupted' in caplog.text

    assert len(process_mock().start.call_args_list) == 5
    assert len(process_mock().join.call_args_list) == 4
    assert len(process_mock().terminate.call_args_list) == 5
2837
2838
2839
# Each entry: (filter expression, expected cmake filter stages).
TESTDATA_20 = [
    ('', []),
    ('not ARCH in ["x86", "arc"]', ['full']),
    ('dt_dummy(x, y)', ['dts']),
    ('not CONFIG_FOO', ['kconfig']),
    ('dt_dummy and CONFIG_FOO', ['dts', 'kconfig']),
]
2847
@pytest.mark.parametrize(
    'filter_expr, expected_result',
    TESTDATA_20,
    ids=['none', 'full', 'dts', 'kconfig', 'dts+kconfig']
)
def test_twisterrunner_get_cmake_filter_stages(filter_expr, expected_result):
    """get_cmake_filter_stages() should map a filter expression onto the
    cmake stages (dts/kconfig/full) needed to evaluate it.

    Fix: the parameter formerly shadowed the ``filter`` builtin; renamed to
    ``filter_expr`` (parametrize string updated to match).
    """
    result = TwisterRunner.get_cmake_filter_stages(filter_expr, ['not', 'and'])

    # Order of the returned stages is not part of the contract.
    assert sorted(result) == sorted(expected_result)
2857