1#!/usr/bin/env python3
2# Copyright (c) 2023 Google LLC
3#
4# SPDX-License-Identifier: Apache-2.0
5"""
6Tests for runner.py classes
7"""
8
9import errno
10import mock
11import os
12import pathlib
13import pytest
14import queue
15import re
16import subprocess
17import sys
18import yaml
19
20from contextlib import nullcontext
21from elftools.elf.sections import SymbolTableSection
22from typing import List
23
# Make the in-tree twisterlib package importable when running from a
# Zephyr checkout; ZEPHYR_BASE must be set in the environment.
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))
26
27from twisterlib.statuses import TwisterStatus
28from twisterlib.error import BuildError
29from twisterlib.harness import Pytest
30
31from twisterlib.runner import (
32    CMake,
33    ExecutionCounter,
34    FilterBuilder,
35    ProjectBuilder,
36    TwisterRunner
37)
38
@pytest.fixture
def mocked_instance(tmp_path):
    """Provide a mocked test instance with an empty build directory."""
    testsuite = mock.Mock()
    testsuite.source_dir = ''

    platform = mock.Mock()
    platform.sysbuild = False
    platform.binaries = []

    build_dir = tmp_path / 'build_dir'
    os.makedirs(build_dir)

    instance = mock.Mock()
    instance.testsuite = testsuite
    instance.platform = platform
    instance.build_dir = str(build_dir)
    return instance
53
54
@pytest.fixture
def mocked_env():
    """Provide a mocked twister environment with verbose logging enabled."""
    options = mock.Mock()
    options.verbose = 2
    return mock.Mock(options=options)
62
63
@pytest.fixture
def mocked_jobserver():
    """Provide a mocked job server."""
    return mock.Mock()
68
69
@pytest.fixture
def project_builder(mocked_instance, mocked_env, mocked_jobserver) -> ProjectBuilder:
    """Provide a ProjectBuilder wired to the mocked instance, env and jobserver."""
    return ProjectBuilder(mocked_instance, mocked_env, mocked_jobserver)
74
75
@pytest.fixture
def runners(project_builder: ProjectBuilder) -> dict:
    """
    Create runners.yaml file in build_dir/zephyr directory and return file
    content as dict.
    """
    zephyr_dir = os.path.join(project_builder.instance.build_dir, 'zephyr')
    os.makedirs(zephyr_dir)
    content: dict = {
        'config': {
            'elf_file': 'zephyr.elf',
            # Deliberately an absolute path so sanitization tests have
            # something to strip.
            'hex_file': os.path.join(zephyr_dir, 'zephyr.elf'),
            'bin_file': 'zephyr.bin',
        }
    }
    with open(os.path.join(zephyr_dir, 'runners.yaml'), 'w') as file:
        yaml.dump(content, file)

    return content
96
97
@mock.patch("os.path.exists")
def test_projectbuilder_cmake_assemble_args_single(m):
    """cmake_assemble_args() merges base args, handler args, conf files,
    overlays and extra cmake args into one -D flag list."""
    # Causes the additional_overlay_path to be appended
    m.return_value = True

    handler = mock.Mock()
    handler.args = ["handler_arg1", "handler_arg2"]
    handler.ready = True

    assembled = ProjectBuilder.cmake_assemble_args(
        ["basearg1", "CONFIG_t=\"test\"", "SNIPPET_t=\"test\""],
        handler,
        ["a.conf;b.conf", "c.conf"],
        ["extra_overlay.conf"],
        ["x.overlay;y.overlay", "z.overlay"],
        ["cmake1=foo", "cmake2=bar"],
        "/builddir/",
    )

    assert assembled == [
        "-DCONFIG_t=\"test\"",
        "-Dcmake1=foo", "-Dcmake2=bar",
        "-Dbasearg1", "-DSNIPPET_t=test",
        "-Dhandler_arg1", "-Dhandler_arg2",
        "-DCONF_FILE=a.conf;b.conf;c.conf",
        "-DDTC_OVERLAY_FILE=x.overlay;y.overlay;z.overlay",
        "-DOVERLAY_CONFIG=extra_overlay.conf "
        "/builddir/twister/testsuite_extra.conf",
    ]
128
129
def test_if_default_binaries_are_taken_properly(project_builder: ProjectBuilder):
    """Without platform/runners overrides, _get_binaries() yields the defaults."""
    expected = [
        os.path.join('zephyr', f'zephyr.{ext}')
        for ext in ('hex', 'bin', 'elf', 'exe')
    ]
    project_builder.instance.sysbuild = False
    binaries = project_builder._get_binaries()
    assert sorted(binaries) == sorted(expected)
140
141
def test_if_binaries_from_platform_are_taken_properly(project_builder: ProjectBuilder):
    """Binaries declared by the platform are returned, prefixed with zephyr/."""
    platform_binaries = ['spi_image.bin']
    project_builder.platform.binaries = platform_binaries
    project_builder.instance.sysbuild = False
    # Loop variable renamed from `bin` to avoid shadowing the builtin.
    platform_binaries_expected = [
        os.path.join('zephyr', binary) for binary in platform_binaries
    ]
    binaries = project_builder._get_binaries()
    assert sorted(binaries) == sorted(platform_binaries_expected)
149
150
def test_if_binaries_from_runners_are_taken_properly(runners, project_builder: ProjectBuilder):
    """runners.yaml entries are returned; relative ones get the zephyr/ prefix."""
    runners_binaries = list(runners['config'].values())
    # Loop variable renamed from `bin` to avoid shadowing the builtin.
    runners_binaries_expected = [
        path if os.path.isabs(path) else os.path.join('zephyr', path)
        for path in runners_binaries
    ]
    binaries = project_builder._get_binaries_from_runners()
    assert sorted(binaries) == sorted(runners_binaries_expected)
156
157
def test_if_runners_file_is_sanitized_properly(runners, project_builder: ProjectBuilder):
    """_sanitize_runners_file() removes all absolute paths from runners.yaml."""
    runners_file_path = os.path.join(project_builder.instance.build_dir, 'zephyr', 'runners.yaml')
    with open(runners_file_path, 'r') as file:
        unsanitized_runners_content = yaml.safe_load(file)
    unsanitized_runners_binaries = list(unsanitized_runners_content['config'].values())
    # Comprehension variable renamed from `bin` to avoid shadowing the builtin.
    abs_paths = [path for path in unsanitized_runners_binaries if os.path.isabs(path)]
    # Precondition: the fixture must have produced at least one absolute path.
    assert len(abs_paths) > 0

    project_builder._sanitize_runners_file()

    with open(runners_file_path, 'r') as file:
        sanitized_runners_content = yaml.safe_load(file)
    sanitized_runners_binaries = list(sanitized_runners_content['config'].values())
    abs_paths = [path for path in sanitized_runners_binaries if os.path.isabs(path)]
    assert len(abs_paths) == 0
173
174
def test_if_zephyr_base_is_sanitized_properly(project_builder: ProjectBuilder):
    """ZEPHYR_BASE prefixes are stripped from generated build files."""
    sanitized_path_expected = os.path.join('sanitized', 'path')
    cmakecache_file = pathlib.Path(project_builder.instance.build_dir) / 'CMakeCache.txt'
    cmakecache_file.write_text(
        os.path.join(os.path.realpath(ZEPHYR_BASE), sanitized_path_expected)
    )

    project_builder._sanitize_zephyr_base_from_files()

    assert cmakecache_file.read_text() == sanitized_path_expected
187
188
def test_executioncounter(capfd):
    """ExecutionCounter.summary() renders the expected result tree and
    leaves every counter unchanged."""
    ec = ExecutionCounter(total=12)

    # Populate every counter that summary() reads.
    ec.cases = 25
    ec.skipped_cases = 6
    ec.error = 2
    ec.iteration = 2
    ec.done = 9
    ec.passed = 6
    ec.filtered_configs = 3
    ec.filtered_runtime = 1
    ec.filtered_static = 2
    ec.failed = 1

    ec.summary()

    # Re-emit captured output so it remains visible in pytest's report.
    out, err = capfd.readouterr()
    sys.stdout.write(out)
    sys.stderr.write(err)

    # The tree below is derived from the counters set above
    # (e.g. selected = done - filtered_configs = 6).
    assert (
"├── Total test suites: 12\n"
"├── Processed test suites: 9\n"
"│   ├── Filtered test suites: 3\n"
"│   │   ├── Filtered test suites (static): 2\n"
"│   │   └── Filtered test suites (at runtime): 1\n"
"│   └── Selected test suites: 6\n"
"│       ├── Skipped test suites: 0\n"
"│       ├── Passed test suites: 6\n"
"│       ├── Built only test suites: 0\n"
"│       ├── Failed test suites: 1\n"
"│       └── Errors in test suites: 2\n"
"└── Total test cases: 25\n"
"    ├── Filtered test cases: 0\n"
"    └── Selected test cases: 25\n"
"        ├── Passed test cases: 0\n"
"        ├── Skipped test cases: 6\n"
"        ├── Built only test cases: 0\n"
"        ├── Blocked test cases: 0\n"
"        ├── Failed test cases: 0\n"
"        └── Errors in test cases: 0\n"
    ) in out

    # summary() must be read-only with respect to the counters.
    assert ec.cases == 25
    assert ec.skipped_cases == 6
    assert ec.error == 2
    assert ec.iteration == 2
    assert ec.done == 9
    assert ec.passed == 6
    assert ec.filtered_configs == 3
    assert ec.filtered_runtime == 1
    assert ec.filtered_static == 2
    assert ec.failed == 1
242
243
def test_cmake_parse_generated(mocked_jobserver):
    """parse_generated() on a freshly created CMake object yields empty results."""
    cmake = CMake(
        mock.Mock(),
        mock.Mock(),
        os.path.join('source', 'dir'),
        os.path.join('build', 'dir'),
        mocked_jobserver,
    )

    result = cmake.parse_generated()

    assert cmake.defconfig == {}
    assert result == {}
257
258
# sys.platform values exercising both the jobserver (linux) and plain
# subprocess (nt) popen paths in run_build().
TESTDATA_1_1 = [
    ('linux'),
    ('nt')
]
# Fields per entry:
# (return_code, is_instance_run, p_out,
#  expect_returncode, expect_writes, expected_status, expected_reason,
#  expected_change_skip, expected_add_missing)
TESTDATA_1_2 = [
    (0, False, 'dummy out',
     True, True, TwisterStatus.NOTRUN, None, False, True),
    (0, True, '',
     False, False, TwisterStatus.PASS, None, False, False),
    (1, True, 'ERROR: region `FLASH\' overflowed by 123 MB',
     True,  True, TwisterStatus.SKIP, 'FLASH overflow', True, False),
    (1, True, 'Error: Image size (99 B) + trailer (1 B) exceeds requested size',
     True, True, TwisterStatus.SKIP, 'imgtool overflow', True, False),
    (1, True, 'mock.ANY',
     True, True, TwisterStatus.ERROR, 'Build failure', False, False)
]
275
@pytest.mark.parametrize(
    'return_code, is_instance_run, p_out, expect_returncode,' \
    ' expect_writes, expected_status, expected_reason,' \
    ' expected_change_skip, expected_add_missing',
    TESTDATA_1_2,
    ids=['no error, no instance run', 'no error, instance run',
         'error - region overflow', 'error - image size exceed', 'error']
)
@pytest.mark.parametrize('sys_platform', TESTDATA_1_1)
def test_cmake_run_build(
    sys_platform,
    return_code,
    is_instance_run,
    p_out,
    expect_returncode,
    expect_writes,
    expected_status,
    expected_reason,
    expected_change_skip,
    expected_add_missing
):
    """CMake.run_build() maps the build subprocess outcome onto the
    instance's status/reason and returns the expected result dict."""
    # Fake build process: returns the parametrized exit code and output.
    process_mock = mock.Mock(
        returncode=return_code,
        communicate=mock.Mock(
            return_value=(p_out.encode(sys.getdefaultencoding()), None)
        )
    )

    def mock_popen(*args, **kwargs):
        return process_mock

    testsuite_mock = mock.Mock()
    platform_mock = mock.Mock()
    platform_mock.name = '<platform name>'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')
    # On linux, run_build() goes through the jobserver's popen.
    jobserver_mock = mock.Mock(
        popen=mock.Mock(side_effect=mock_popen)
    )
    instance_mock = mock.Mock(add_missing_case_status=mock.Mock())
    instance_mock.build_time = 0
    instance_mock.run = is_instance_run
    instance_mock.status = TwisterStatus.NONE
    instance_mock.reason = None

    cmake = CMake(testsuite_mock, platform_mock, source_dir, build_dir,
                  jobserver_mock)
    cmake.cwd = os.path.join('dummy', 'working', 'dir')
    cmake.instance = instance_mock
    cmake.options = mock.Mock()
    cmake.options.overflow_as_errors = False

    cmake_path = os.path.join('dummy', 'cmake')

    # On non-linux platforms, subprocess.Popen is used instead.
    popen_mock = mock.Mock(side_effect=mock_popen)
    change_mock = mock.Mock()

    with mock.patch('sys.platform', sys_platform), \
         mock.patch('shutil.which', return_value=cmake_path), \
         mock.patch('twisterlib.runner.change_skip_to_error_if_integration',
                    change_mock), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('subprocess.Popen', popen_mock):
        result = cmake.run_build(args=['arg1', 'arg2'])

    # run_build() returns {'returncode': ...} on completion, None otherwise.
    expected_results = {}
    if expect_returncode:
        expected_results['returncode'] = return_code
    if expected_results == {}:
        expected_results = None

    assert expected_results == result

    # Exactly one popen path must have been taken, depending on platform.
    popen_caller = cmake.jobserver.popen if sys_platform == 'linux' else \
                   popen_mock
    popen_caller.assert_called_once_with(
        [os.path.join('dummy', 'cmake'), 'arg1', 'arg2'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=os.path.join('dummy', 'working', 'dir')
    )

    assert cmake.instance.status == expected_status
    assert cmake.instance.reason == expected_reason

    # Overflow skips are escalated to errors in integration mode.
    if expected_change_skip:
        change_mock.assert_called_once()

    # Build-only instances get their cases marked NOTRUN.
    if expected_add_missing:
        cmake.instance.add_missing_case_status.assert_called_once_with(
            TwisterStatus.NOTRUN, 'Test was built only'
        )
368
369
# sys.platform values exercising both the jobserver (linux) and plain
# subprocess (nt) popen paths in run_cmake().
TESTDATA_2_1 = [
    ('linux'),
    ('nt')
]
# Fields per entry:
# (error_warns, f_stages,
#  return_code, is_instance_run, p_out,
#  expect_returncode, expect_filter, expect_writes,
#  expected_status, expected_reason,
#  expected_cmd)
TESTDATA_2_2 = [
    (True, ['dummy_stage_1', 'ds2'],
     0, False, '',
     True, True, False,
     TwisterStatus.NONE, None,
     [os.path.join('dummy', 'cmake'),
      '-B' + os.path.join('build', 'dir'), '-DTC_RUNID=1', '-DTC_NAME=testcase',
      '-DSB_CONFIG_COMPILER_WARNINGS_AS_ERRORS=y',
      '-DEXTRA_GEN_EDT_ARGS=--edtlib-Werror', '-Gdummy_generator',
      f'-DPython3_EXECUTABLE={pathlib.Path(sys.executable).as_posix()}',
      '-DZEPHYR_TOOLCHAIN_VARIANT=zephyr',
      '-S' + os.path.join('source', 'dir'),
      'arg1', 'arg2',
      '-DBOARD=<platform name>',
      '-DSNIPPET=dummy snippet 1;ds2',
      '-DMODULES=dummy_stage_1,ds2',
      '-Pzephyr_base/cmake/package_helper.cmake']),
    (False, [],
     1, True, 'ERROR: region `FLASH\' overflowed by 123 MB',
     True, False, True,
     TwisterStatus.ERROR, 'CMake build failure',
     [os.path.join('dummy', 'cmake'),
      '-B' + os.path.join('build', 'dir'), '-DTC_RUNID=1', '-DTC_NAME=testcase',
      '-DSB_CONFIG_COMPILER_WARNINGS_AS_ERRORS=n',
      '-DEXTRA_GEN_EDT_ARGS=', '-Gdummy_generator',
      f'-DPython3_EXECUTABLE={pathlib.Path(sys.executable).as_posix()}',
      '-DZEPHYR_TOOLCHAIN_VARIANT=zephyr',
      '-Szephyr_base/share/sysbuild',
      '-DAPP_DIR=' + os.path.join('source', 'dir'),
      'arg1', 'arg2',
      '-DBOARD=<platform name>',
      '-DSNIPPET=dummy snippet 1;ds2']),
]
407
@pytest.mark.parametrize(
    'error_warns, f_stages,' \
    ' return_code, is_instance_run, p_out, expect_returncode,' \
    ' expect_filter, expect_writes, expected_status, expected_reason,' \
    ' expected_cmd',
    TESTDATA_2_2,
    ids=['filter_stages with success', 'no stages with error']
)
@pytest.mark.parametrize('sys_platform', TESTDATA_2_1)
def test_cmake_run_cmake(
    sys_platform,
    error_warns,
    f_stages,
    return_code,
    is_instance_run,
    p_out,
    expect_returncode,
    expect_filter,
    expect_writes,
    expected_status,
    expected_reason,
    expected_cmd
):
    """CMake.run_cmake() assembles the expected cmake command line and
    propagates the subprocess outcome to the instance and its testcases."""
    # Fake cmake process: returns the parametrized exit code and output.
    process_mock = mock.Mock(
        returncode=return_code,
        communicate=mock.Mock(
            return_value=(p_out.encode(sys.getdefaultencoding()), None)
        )
    )

    def mock_popen(*args, **kwargs):
        return process_mock

    testsuite_mock = mock.Mock()
    testsuite_mock.sysbuild = True
    platform_mock = mock.Mock()
    platform_mock.name = '<platform name>'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')
    # On linux, run_cmake() goes through the jobserver's popen.
    jobserver_mock = mock.Mock(
        popen=mock.Mock(side_effect=mock_popen)
    )
    instance_mock = mock.Mock(add_missing_case_status=mock.Mock())
    instance_mock.run = is_instance_run
    instance_mock.run_id = 1
    instance_mock.build_time = 0
    instance_mock.status = TwisterStatus.NONE
    instance_mock.reason = None
    instance_mock.toolchain = 'zephyr'
    instance_mock.testsuite = mock.Mock()
    instance_mock.testsuite.name = 'testcase'
    instance_mock.testsuite.required_snippets = ['dummy snippet 1', 'ds2']
    instance_mock.testcases = [mock.Mock(), mock.Mock()]
    instance_mock.testcases[0].status = TwisterStatus.NONE
    instance_mock.testcases[1].status = TwisterStatus.NONE

    cmake = CMake(testsuite_mock, platform_mock, source_dir, build_dir,
                  jobserver_mock)
    cmake.cwd = os.path.join('dummy', 'working', 'dir')
    cmake.instance = instance_mock
    cmake.options = mock.Mock()
    cmake.options.disable_warnings_as_errors = not error_warns
    cmake.options.overflow_as_errors = False
    cmake.env = mock.Mock()
    cmake.env.generator = 'dummy_generator'

    cmake_path = os.path.join('dummy', 'cmake')

    # On non-linux platforms, subprocess.Popen is used instead.
    popen_mock = mock.Mock(side_effect=mock_popen)
    change_mock = mock.Mock()

    with mock.patch('sys.platform', sys_platform), \
         mock.patch('shutil.which', return_value=cmake_path), \
         mock.patch('twisterlib.runner.change_skip_to_error_if_integration',
                    change_mock), \
         mock.patch('twisterlib.runner.canonical_zephyr_base',
                    'zephyr_base'), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('subprocess.Popen', popen_mock):
        result = cmake.run_cmake(args=['arg1', 'arg2'], filter_stages=f_stages)

    # run_cmake() returns a dict with returncode/filter keys, or None.
    expected_results = {}
    if expect_returncode:
        expected_results['returncode'] = return_code
    if expect_filter:
        expected_results['filter'] = {}
    if expected_results == {}:
        expected_results = None

    assert expected_results == result

    # Exactly one popen path must have been taken, depending on platform.
    popen_caller = cmake.jobserver.popen if sys_platform == 'linux' else \
                   popen_mock
    popen_caller.assert_called_once_with(
        expected_cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=os.path.join('dummy', 'working', 'dir')
    )

    assert cmake.instance.status == expected_status
    assert cmake.instance.reason == expected_reason

    # The instance status must have been propagated to every testcase.
    for tc in cmake.instance.testcases:
        assert tc.status == cmake.instance.status
513
514
# Fields per entry:
# (platform_name, filter_stages, sysbuild,
#  do_find_cache, west_flash_options, edt_exists,
#  parse_results, testsuite_filter,
#  expected_defconfig_path, expected_edt_pickle_path,
#  expected_defconfig, expected_cmakecache, expected_filter_data,
#  expected_edt, expected_logs, expected_return)
TESTDATA_3 = [
    ('unit_testing', [], False, True, None, True, None, True,
     None, None, {}, {}, None, None, [], {}),
    (
        'other', [], True,
        True, ['dummy', 'west', 'options'], True,
        None, True,
        os.path.join('domain', 'build', 'dir', 'zephyr', '.config'),
        os.path.join('domain', 'build', 'dir', 'zephyr', 'edt.pickle'),
        {'CONFIG_FOO': 'no'},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'CONFIG_FOO': 'no', 'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [f'Loaded sysbuild domain data from' \
         f' {os.path.join("build", "dir", "domains.yaml")}'],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): True}
    ),
    (
        'other', ['kconfig'], True,
        True, ['dummy', 'west', 'options'], True,
        'Dummy parse results', True,
        os.path.join('build', 'dir', 'zephyr', '.config'),
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {'CONFIG_FOO': 'no'},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'CONFIG_FOO': 'no', 'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], False,
        False, None, True,
        'Dummy parse results', True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], True,
        False, None, True,
        'Dummy parse results', True,
        None,
        None,
        {},
        {},
        {},
        None,
        ['Sysbuild test will be skipped. West must be used for flashing.'],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): True}
    ),
    (
        'other', ['other'], False,
        True, None, False,
        'Dummy parse results', True,
        None,
        None,
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        None,
        [],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        'Dummy parse results', True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        None, True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): True}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        'Dummy parse results', False,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        SyntaxError, True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        ['Failed processing testsuite.yaml'],
        SyntaxError
    ),
]
645
@pytest.mark.parametrize(
    'platform_name, filter_stages, sysbuild,' \
    ' do_find_cache, west_flash_options, edt_exists,' \
    ' parse_results, testsuite_filter,' \
    ' expected_defconfig_path, expected_edt_pickle_path,' \
    ' expected_defconfig, expected_cmakecache, expected_filter_data,' \
    ' expected_edt,' \
    ' expected_logs, expected_return',
    TESTDATA_3,
    ids=['unit testing', 'domain', 'kconfig', 'no cache',
         'no west options', 'no edt',
         'parse result', 'no parse result', 'no testsuite filter', 'parse err']
)
def test_filterbuilder_parse_generated(
    caplog,
    mocked_jobserver,
    platform_name,
    filter_stages,
    sysbuild,
    do_find_cache,
    west_flash_options,
    edt_exists,
    parse_results,
    testsuite_filter,
    expected_defconfig_path,
    expected_edt_pickle_path,
    expected_defconfig,
    expected_cmakecache,
    expected_filter_data,
    expected_edt,
    expected_logs,
    expected_return
):
    """FilterBuilder.parse_generated() collects defconfig, cmake cache and
    edt data and evaluates the testsuite filter against them."""
    # Sysbuild default domain pointing at a separate build directory.
    def mock_domains_from_file(*args, **kwargs):
        dom = mock.Mock()
        dom.build_dir = os.path.join('domain', 'build', 'dir')
        res = mock.Mock(get_default_domain=mock.Mock(return_value=dom))
        return res

    # One-element CMake cache, or FileNotFoundError for the 'no cache' case.
    def mock_cmakecache_from_file(*args, **kwargs):
        if not do_find_cache:
            raise FileNotFoundError(errno.ENOENT, 'Cache not found')
        cache_elem = mock.Mock()
        cache_elem.name = 'dummy cache elem'
        cache_elem.value = 1
        cache = [cache_elem]
        return cache

    # Serve only the defconfig and edt.pickle paths expected by the case.
    def mock_open(filepath, *args, **kwargs):
        if filepath == expected_defconfig_path:
            rd = 'I am not a proper line\n' \
                 'CONFIG_FOO="no"'
        elif filepath == expected_edt_pickle_path:
            rd = b'dummy edt pickle contents'
        else:
            raise FileNotFoundError(errno.ENOENT,
                                    f'File {filepath} not mocked.')
        return mock.mock_open(read_data=rd)()

    # Stand-in for expr_parser.parse; also verifies the assembled filter data.
    def mock_parser(filter, filter_data, edt):
        assert filter_data == expected_filter_data
        if isinstance(parse_results, type) and \
           issubclass(parse_results, Exception):
            raise parse_results
        return parse_results

    # Stand-in for pickle.load; verifies the raw edt bytes were read.
    def mock_pickle(datafile):
        assert datafile.read() == expected_edt
        return mock.Mock()

    testsuite_mock = mock.Mock()
    testsuite_mock.name = 'dummy.testsuite.name'
    testsuite_mock.filter = testsuite_filter
    platform_mock = mock.Mock()
    platform_mock.name = platform_name
    platform_mock.arch = 'dummy arch'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')

    fb = FilterBuilder(testsuite_mock, platform_mock, source_dir, build_dir,
                       mocked_jobserver)
    instance_mock = mock.Mock()
    instance_mock.sysbuild = 'sysbuild' if sysbuild else None
    instance_mock.toolchain = 'zephyr'
    fb.instance = instance_mock
    fb.env = mock.Mock()
    fb.env.options = mock.Mock()
    fb.env.options.west_flash = west_flash_options
    fb.env.options.device_testing = True

    environ_mock = {'env_dummy': True}

    # pytest.raises only when the parametrized parse result is an exception
    # class; otherwise a nullcontext (err is then None).
    with mock.patch('twisterlib.runner.Domains.from_file',
                    mock_domains_from_file), \
         mock.patch('twisterlib.runner.CMakeCache.from_file',
                    mock_cmakecache_from_file), \
         mock.patch('builtins.open', mock_open), \
         mock.patch('expr_parser.parse', mock_parser), \
         mock.patch('pickle.load', mock_pickle), \
         mock.patch('os.path.exists', return_value=edt_exists), \
         mock.patch('os.environ', environ_mock), \
         pytest.raises(expected_return) if \
             isinstance(parse_results, type) and \
             issubclass(parse_results, Exception) else nullcontext() as err:
        result = fb.parse_generated(filter_stages)

    # The exception path was already validated by pytest.raises above.
    if err:
        assert True
        return

    assert all([log in caplog.text for log in expected_logs])

    assert fb.defconfig == expected_defconfig

    assert fb.cmake_cache == expected_cmakecache

    assert result == expected_return
763
764
# Fields per entry: (inline_logs, read_exception, expected_logs)
TESTDATA_4 = [
    (False, False, [f"see: {os.path.join('dummy', 'path', 'dummy_file.log')}"]),
    (True, False, [os.path.join('dummy', 'path', 'dummy_file.log'),
                    'file contents',
                    os.path.join('dummy', 'path', 'dummy_file.log')]),
    (True, True, [os.path.join('dummy', 'path', 'dummy_file.log'),
                   'Unable to read log data ([Errno 2] ERROR: dummy_file.log)',
                   os.path.join('dummy', 'path', 'dummy_file.log')]),
]
774
@pytest.mark.parametrize(
    'inline_logs, read_exception, expected_logs',
    TESTDATA_4,
    ids=['basic', 'inline logs', 'inline logs+read_exception']
)
def test_projectbuilder_log_info(
    caplog,
    mocked_jobserver,
    inline_logs,
    read_exception,
    expected_logs
):
    """log_info() either references the log file or inlines its contents."""
    def fake_open(filename, *args, **kwargs):
        if read_exception:
            raise OSError(errno.ENOENT, f'ERROR: {os.path.basename(filename)}')
        return mock.mock_open(read_data='file contents')()

    pb = ProjectBuilder(mock.Mock(), mock.Mock(), mocked_jobserver)

    with mock.patch('builtins.open', fake_open), \
         mock.patch('os.path.realpath',
                    lambda filename, *a, **kw: os.path.join('path', filename)), \
         mock.patch('os.path.abspath',
                    lambda filename, *a, **kw: os.path.join('dummy', filename)):
        pb.log_info('dummy_file.log', inline_logs)

    assert all(log in caplog.text for log in expected_logs)
810
811
# Fields per entry:
# (valgrind_log_exists, handler_log_exists, device_log_exists,
#  instance_reason, handler_log_getsize, device_log_getsize, expected_log)
TESTDATA_5 = [
    (True, False, False, "Valgrind error", 0, 0, 'build_dir/valgrind.log'),
    (True, False, False, "Error", 0, 0, 'build_dir/build.log'),
    (False, True, False, None, 1024, 0, 'build_dir/handler.log'),
    (False, True, False, None, 0, 0, 'build_dir/build.log'),
    (False, False, True, None, 0, 1024, 'build_dir/device.log'),
    (False, False, True, None, 0, 0, 'build_dir/build.log'),
    (False, False, False, None, 0, 0, 'build_dir/build.log'),
]
821
@pytest.mark.parametrize(
    'valgrind_log_exists, handler_log_exists, device_log_exists,' \
    ' instance_reason, handler_log_getsize, device_log_getsize, expected_log',
    TESTDATA_5,
    ids=['valgrind log', 'valgrind log unused',
         'handler log', 'handler log unused',
         'device log', 'device log unused',
         'no logs']
)
def test_projectbuilder_log_info_file(
    caplog,
    mocked_jobserver,
    valgrind_log_exists,
    handler_log_exists,
    device_log_exists,
    instance_reason,
    handler_log_getsize,
    device_log_getsize,
    expected_log
):
    """log_info_file() selects the most relevant log for the instance."""
    # Table-driven stand-ins for os.path.exists / os.path.getsize.
    existing = {
        'build_dir/handler.log': handler_log_exists,
        'build_dir/valgrind.log': valgrind_log_exists,
        'build_dir/device.log': device_log_exists,
    }
    sizes = {
        'build_dir/handler.log': handler_log_getsize,
        'build_dir/device.log': device_log_getsize,
    }

    env_mock = mock.Mock()
    instance_mock = mock.Mock()
    instance_mock.reason = instance_reason
    instance_mock.build_dir = 'build_dir'

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    log_info_mock = mock.Mock()

    with mock.patch('os.path.exists',
                    lambda filename, *a, **kw: existing.get(filename, False)), \
         mock.patch('os.path.getsize',
                    lambda filename, *a, **kw: sizes.get(filename, 0)), \
         mock.patch('twisterlib.runner.ProjectBuilder.log_info', log_info_mock):
        pb.log_info_file(None)

    log_info_mock.assert_called_with(expected_log, mock.ANY)
873
874
# Each TESTDATA_6 entry mirrors the parametrize argument list of
# test_projectbuilder_process():
#   (message, instance_status, instance_reason, instance_run,
#    instance_handler_ready, options_cmake_only, options_coverage,
#    options_prep_artifacts, options_runtime_artifacts, cmake_res, build_res,
#    metrics_res, pipeline_runtime_error, determine_testcases_build_error,
#    expected_logs, resulting_message, expected_status, expected_reason,
#    expected_skipped, expected_missing)
# The comment above each entry is its parametrize id.
TESTDATA_6 = [
    # 'filter, failed'
    (
        {'op': 'filter'},
        TwisterStatus.FAIL,
        'Failed',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.FAIL,
        'Failed',
        0,
        None
    ),
    # 'filter, cmake res'
    (
        {'op': 'filter'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': { 'dummy instance name': True }},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['filtering dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.FILTER,
        'runtime filter',
        1,
        (TwisterStatus.FILTER,)
    ),
    # 'filter, no cmake res'
    (
        {'op': 'filter'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': { 'another dummy instance name': True }},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cmake', 'test': mock.ANY},
        TwisterStatus.PASS,
        mock.ANY,
        0,
        None
    ),
    # 'cmake, failed'
    (
        {'op': 'cmake'},
        TwisterStatus.ERROR,
        'dummy error',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.ERROR,
        'dummy error',
        0,
        None
    ),
    # 'cmake, cmake_only, no status'
    (
        {'op': 'cmake'},
        TwisterStatus.NONE,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.NOTRUN,
        mock.ANY,
        0,
        None
    ),
    # 'cmake, cmake_only'
    (
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        'success',
        mock.ANY,
        0,
        None
    ),
    # 'cmake, no cmake_only, cmake res'
    (
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': {'dummy instance name': True}},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['filtering dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.FILTER,
        'runtime filter',
        1,
        (TwisterStatus.FILTER,) # this is a tuple
    ),
    # 'cmake, no cmake_only, no cmake res'
    (
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': {}},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'build', 'test': mock.ANY},
        'success',
        mock.ANY,
        0,
        None
    ),
    # 'build, no build res'
    (
        {'op': 'build'},
        mock.ANY,
        None,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        None,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.ERROR,
        'Build Failure',
        0,
        None
    ),
    # 'build, skipped'
    (
        {'op': 'build'},
        TwisterStatus.SKIP,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'gather_metrics', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        (TwisterStatus.SKIP, mock.ANY)
    ),
    # 'build, blocked'
    (
        {'op': 'build'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'dummy': 'dummy'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.PASS,
        mock.ANY,
        0,
        (TwisterStatus.BLOCK, mock.ANY)
    ),
    # 'build, determine testcases'
    (
        {'op': 'build'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'gather_metrics', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # 'build, determine testcases Error'
    (
        {'op': 'build'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        mock.ANY,
        BuildError,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.ERROR,
        'Determine Testcases Error!',
        0,
        None
    ),
    # 'gather metrics, run and ready handler'
    (
        {'op': 'gather_metrics'},
        mock.ANY,
        mock.ANY,
        True,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},  # metrics_res
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'run', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),  # 'gather metrics, run and ready handler'
    # 'gather metrics'
    (
        {'op': 'gather_metrics'},
        mock.ANY,
        mock.ANY,
        False,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},  # metrics_res
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),  # 'gather metrics'
    # 'build ok, gather metrics fail'
    (
        {'op': 'gather_metrics'},
        mock.ANY,
        mock.ANY,
        False,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},  # build_res
        {'returncode': 1},  # metrics_res
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        'error',
        'Build Failure at gather_metrics.',
        0,
        None
    ),  # 'build ok, gather metrics fail',
    # 'run'
    (
        {'op': 'run'},
        'success',
        'OK',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        None,
        mock.ANY,
        ['run test: dummy instance name',
         'run status: dummy instance name success'],
        {'op': 'coverage', 'test': mock.ANY, 'status': 'success', 'reason': 'OK'},
        'success',
        'OK',
        0,
        None
    ),
    # 'run, Pipeline Runtime Error'
    (
        {'op': 'run'},
        TwisterStatus.FAIL,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        RuntimeError,
        mock.ANY,
        ['run test: dummy instance name',
         'run status: dummy instance name failed',
         'RuntimeError: Pipeline Error!'],
        None,
        TwisterStatus.FAIL,
        mock.ANY,
        0,
        None
    ),
    # 'report, prep artifacts for testing'
    (
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'device', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # 'report, runtime artifact cleanup pass, status passed'
    (
        {'op': 'report'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'pass',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'passed', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # 'report, runtime artifact cleanup all'
    (
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'all',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'all', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # 'report, no message put'
    (
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'other',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # 'cleanup, device'
    (
        {'op': 'cleanup', 'mode': 'device'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # 'cleanup, mode passed'
    (
        {'op': 'cleanup', 'mode': 'passed'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # 'cleanup, mode all'
    (
        {'op': 'cleanup', 'mode': 'all'},
        mock.ANY,
        'Valgrind error',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    # 'cleanup, mode all, cmake build failure'
    (
        {'op': 'cleanup', 'mode': 'all'},
        mock.ANY,
        'CMake build failure',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
]
1455
@pytest.mark.parametrize(
    'message,' \
    ' instance_status, instance_reason, instance_run, instance_handler_ready,' \
    ' options_cmake_only,' \
    ' options_coverage, options_prep_artifacts, options_runtime_artifacts,' \
    ' cmake_res, build_res, metrics_res,' \
    ' pipeline_runtime_error, determine_testcases_build_error,' \
    ' expected_logs, resulting_message,' \
    ' expected_status, expected_reason, expected_skipped, expected_missing',
    TESTDATA_6,
    ids=[
        'filter, failed', 'filter, cmake res', 'filter, no cmake res',
        'cmake, failed', 'cmake, cmake_only, no status', 'cmake, cmake_only',
        'cmake, no cmake_only, cmake res', 'cmake, no cmake_only, no cmake res',
        'build, no build res', 'build, skipped', 'build, blocked',
        'build, determine testcases', 'build, determine testcases Error',
        'gather metrics, run and ready handler', 'gather metrics',
        'build ok, gather metrics fail',
        'run', 'run, Pipeline Runtime Error',
        'report, prep artifacts for testing',
        'report, runtime artifact cleanup pass, status passed',
        'report, runtime artifact cleanup all', 'report, no message put',
        'cleanup, device', 'cleanup, mode passed', 'cleanup, mode all',
        'cleanup, mode all, cmake build failure'
    ]
)
def test_projectbuilder_process(
    caplog,
    mocked_jobserver,
    tmp_path,
    message,
    instance_status,
    instance_reason,
    instance_run,
    instance_handler_ready,
    options_cmake_only,
    options_coverage,
    options_prep_artifacts,
    options_runtime_artifacts,
    cmake_res,
    build_res,
    metrics_res,
    pipeline_runtime_error,
    determine_testcases_build_error,
    expected_logs,
    resulting_message,
    expected_status,
    expected_reason,
    expected_skipped,
    expected_missing
):
    """
    Feed a single pipeline message into ProjectBuilder.process() with all
    stage methods mocked, then check the follow-up message put on the
    pipeline, the resulting instance status/reason, the emitted logs, the
    filtered-runtime counter and any missing-case bookkeeping.
    """
    # Optionally make pipeline.put() raise to exercise process()'s
    # RuntimeError handling path.
    def mock_pipeline_put(msg):
        if isinstance(pipeline_runtime_error, type) and \
           issubclass(pipeline_runtime_error, Exception):
            raise RuntimeError('Pipeline Error!')

    # Optionally make determine_testcases() raise a BuildError to exercise
    # the 'Determine Testcases Error!' path.
    def mock_determine_testcases(res):
        if isinstance(determine_testcases_build_error, type) and \
           issubclass(determine_testcases_build_error, Exception):
            raise BuildError('Determine Testcases Error!')

    instance_mock = mock.Mock()
    instance_mock.name = 'dummy instance name'
    instance_mock.status = instance_status
    instance_mock.reason = instance_reason
    instance_mock.run = instance_run
    instance_mock.handler = mock.Mock()
    instance_mock.handler.ready = instance_handler_ready
    instance_mock.testsuite.harness = 'test'
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options = mock.Mock()
    pb.options.coverage = options_coverage
    pb.options.prep_artifacts_for_testing = options_prep_artifacts
    pb.options.runtime_artifact_cleanup = options_runtime_artifacts
    pb.options.cmake_only = options_cmake_only
    pb.options.outdir = tmp_path
    pb.options.log_file = None
    pb.options.log_level = "DEBUG"

    # Stub out every pipeline stage so only process()'s dispatch logic runs.
    pb.cmake = mock.Mock(return_value=cmake_res)
    pb.build = mock.Mock(return_value=build_res)
    pb.determine_testcases = mock.Mock(side_effect=mock_determine_testcases)

    pb.report_out = mock.Mock()
    pb.cleanup_artifacts = mock.Mock()
    pb.cleanup_device_testing_artifacts = mock.Mock()
    pb.run = mock.Mock()
    pb.gather_metrics = mock.Mock(return_value=metrics_res)

    pipeline_mock = mock.Mock(put=mock.Mock(side_effect=mock_pipeline_put))
    done_mock = mock.Mock()
    # Stand-in for the shared lock: configured with __enter__/__exit__
    # attributes so it can be entered like a context manager.
    lock_mock = mock.Mock(
        __enter__=mock.Mock(return_value=(mock.Mock(), mock.Mock())),
        __exit__=mock.Mock(return_value=None)
    )
    results_mock = mock.Mock()
    results_mock.filtered_runtime = 0

    pb.process(pipeline_mock, done_mock, message, lock_mock, results_mock)

    # Every expected log fragment must appear in the captured output.
    assert all([log in caplog.text for log in expected_logs])

    if resulting_message:
        pipeline_mock.put.assert_called_with(resulting_message)

    assert pb.instance.status == expected_status
    assert pb.instance.reason == expected_reason
    # filtered_runtime_increment() must be called once per runtime-filtered
    # instance.
    assert results_mock.filtered_runtime_increment.call_args_list == [mock.call()] * expected_skipped

    if expected_missing:
        pb.instance.add_missing_case_status.assert_called_with(*expected_missing)
1569
1570
# TESTDATA_7 entries: (detailed_id, symbols_names, added_tcs)
#   detailed_id   - testsuite.detailed_test_id flag (prefix testcase names
#                   with the suite id or not)
#   symbols_names - ELF symbol names (plain and C++-mangled) presented to
#                   determine_testcases()
#   added_tcs     - testcase names expected to be composed from the symbols
# The comment above each entry is its parametrize id.
TESTDATA_7 = [
    # 'two hits, one miss'
    (
        True,
        [
            'z_ztest_unit_test__dummy_suite1_name__dummy_test_name1',
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'no match'
        ],
        [
            'dummy.test_id.dummy_suite1_name.dummy_name1',
            'dummy.test_id.dummy_suite2_name.dummy_name2'
        ]
    ),
    # 'two hits short id'
    (
        False,
        [
            'z_ztest_unit_test__dummy_suite1_name__dummy_test_name1',
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'no match'
        ],
        [
            'dummy_suite1_name.dummy_name1',
            'dummy_suite2_name.dummy_name2'
        ]
    ),
    # 'demangle'
    (
        True,
        [
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'z_ztest_unit_test__bad_suite3_name_no_test',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name4E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_bad_name1E',
            '_ZN12_GLOBAL__N_1L51z_ztest_unit_test_dummy_suite3_name__test_bad_name2E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name5E',
            '_ZN15foobarnamespaceL54z_ztest_unit_test__dummy_suite3_name__test_dummy_name6E',
        ],
        [
           'dummy.test_id.dummy_suite2_name.dummy_name2',
           'dummy.test_id.dummy_suite3_name.dummy_name4',
           'dummy.test_id.dummy_suite3_name.bad_name1E',
           'dummy.test_id.dummy_suite3_name.dummy_name5',
           'dummy.test_id.dummy_suite3_name.dummy_name6',
        ]
    ),
    # 'demangle short id'
    (
        True,
        [
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'z_ztest_unit_test__bad_suite3_name_no_test',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name4E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_bad_name1E',
            '_ZN12_GLOBAL__N_1L51z_ztest_unit_test_dummy_suite3_name__test_bad_name2E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name5E',
            '_ZN15foobarnamespaceL54z_ztest_unit_test__dummy_suite3_name__test_dummy_name6E',
        ],
        [
           'dummy_suite2_name.dummy_name2',
           'dummy_suite3_name.dummy_name4',
           'dummy_suite3_name.bad_name1E',
           'dummy_suite3_name.dummy_name5',
           'dummy_suite3_name.dummy_name6',
        ]
    ),
    # 'nothing'
    (
        True,
        ['no match'],
        []
    ),
]
1640
@pytest.mark.parametrize(
    'detailed_id, symbols_names, added_tcs',
    TESTDATA_7,
    ids=['two hits, one miss', 'two hits short id', 'demangle', 'demangle short id', 'nothing']
)
def test_projectbuilder_determine_testcases(
    mocked_jobserver,
    mocked_env,
    detailed_id,
    symbols_names,
    added_tcs
):
    """Check that ztest testcase names are picked out of ELF symbol tables
    and registered on both the instance and its testsuite."""
    symbols = []
    for sym_name in symbols_names:
        sym = mock.Mock(n=sym_name)
        # 'name' is reserved by the Mock constructor, so set it afterwards.
        sym.configure_mock(name=sym_name)
        symbols.append(sym)

    symtab = mock.Mock(spec=SymbolTableSection)
    symtab.iter_symbols = mock.Mock(return_value=symbols)

    # ELFFile(...) should yield a single symbol table section.
    elffile_mock = mock.Mock()
    elffile_mock().iter_sections = mock.Mock(return_value=[symtab])

    results = mock.Mock()

    instance = mock.Mock()
    instance.testcases = []
    instance.testsuite.id = 'dummy.test_id'
    instance.testsuite.ztest_suite_names = []
    instance.testsuite.detailed_test_id = detailed_id
    instance.compose_case_name = mock.Mock(side_effect=iter(added_tcs))

    pb = ProjectBuilder(instance, mocked_env, mocked_jobserver)

    with mock.patch('twisterlib.runner.ELFFile', elffile_mock), \
         mock.patch('builtins.open', mock.mock_open()):
        pb.determine_testcases(results)

    expected_calls = [mock.call(name=tc) for tc in added_tcs]
    pb.instance.add_testcase.assert_has_calls(expected_calls)
    pb.instance.testsuite.add_testcase.assert_has_calls(expected_calls)
1684
1685
# TESTDATA_8 entries: (additional_keep, runtime_artifact_cleanup,
# expected_files) - extra paths to preserve, the cleanup mode, and the
# file/dir names expected to survive cleanup_artifacts().
TESTDATA_8 = [
    # 'additional keep'
    (
        ['addition.al'],
        'dummy',
        ['addition.al', '.config', 'zephyr']
    ),
    # 'all cleanup'
    (
        [],
        'all',
        ['.config', 'zephyr', 'testsuite_extra.conf', 'twister']
    ),
]
1698
@pytest.mark.parametrize(
    'additional_keep, runtime_artifact_cleanup, expected_files',
    TESTDATA_8,
    ids=['additional keep', 'all cleanup']
)
def test_projectbuilder_cleanup_artifacts(
    tmpdir,
    mocked_jobserver,
    additional_keep,
    runtime_artifact_cleanup,
    expected_files
):
    """cleanup_artifacts() should leave only the kept files behind.

    Layout created under tmpdir:
      twister/testsuite_extra.conf
      dummy_dir/dummy.del
      dummy_link_dir -> zephyr (symlink)
      zephyr/.config
      addition.al
    """
    twister_dir = tmpdir.mkdir('twister')
    twister_dir.join('testsuite_extra.conf').write_text('dummy', 'utf-8')

    tmpdir.mkdir('dummy_dir').join('dummy.del').write_text('dummy', 'utf-8')

    zephyr_dir = tmpdir.mkdir('zephyr')
    zephyr_dir.join('.config').write_text('dummy', 'utf-8')

    # Symlinked directory pointing back into the tree.
    os.symlink(zephyr_dir, tmpdir.join('dummy_link_dir'))

    tmpdir.join('addition.al').write_text('dummy', 'utf-8')

    instance = mock.Mock()
    instance.build_dir = tmpdir

    pb = ProjectBuilder(instance, mock.Mock(), mocked_jobserver)
    pb.options = mock.Mock(runtime_artifact_cleanup=runtime_artifact_cleanup)

    pb.cleanup_artifacts(additional_keep)

    remaining = sorted(p.name for p in pathlib.Path(tmpdir).glob('**/*'))

    assert remaining == sorted(expected_files)
1750
1751
def test_projectbuilder_cleanup_device_testing_artifacts(
    caplog,
    mocked_jobserver
):
    """Device-testing cleanup should log its build dir and keep both the
    discovered binaries and runners.yaml."""
    binaries = [os.path.join('zephyr', 'file.bin')]
    build_dir = os.path.join('build', 'dir')

    instance = mock.Mock()
    instance.sysbuild = False
    instance.build_dir = build_dir

    pb = ProjectBuilder(instance, mock.Mock(), mocked_jobserver)
    pb._get_binaries = mock.Mock(return_value=binaries)
    pb.cleanup_artifacts = mock.Mock()
    pb._sanitize_files = mock.Mock()

    pb.cleanup_device_testing_artifacts()

    assert f'Cleaning up for Device Testing {build_dir}' in caplog.text

    expected_keep = [
        os.path.join('zephyr', 'file.bin'),
        os.path.join('zephyr', 'runners.yaml')
    ]
    pb.cleanup_artifacts.assert_called_once_with(expected_keep)
    pb._sanitize_files.assert_called_once()
1778
1779
# TESTDATA_9 entries: (platform_binaries, runner_binaries, expected_binaries)
#   platform_binaries - binaries declared by the platform (None -> defaults)
#   runner_binaries   - binaries reported by _get_binaries_from_runners()
#   expected_binaries - merged result expected from _get_binaries()
TESTDATA_9 = [
    # 'default'
    (
        None,
        [],
        [os.path.join('zephyr', 'zephyr.hex'),
         os.path.join('zephyr', 'zephyr.bin'),
         os.path.join('zephyr', 'zephyr.elf'),
         os.path.join('zephyr', 'zephyr.exe')]
    ),
    # 'valid'
    (
        [os.path.join('dummy.bin'), os.path.join('dummy.hex')],
        [os.path.join('dir2', 'dummy.elf')],
        [os.path.join('zephyr', 'dummy.bin'),
         os.path.join('zephyr', 'dummy.hex'),
         os.path.join('dir2', 'dummy.elf')]
    ),
]
1797
@pytest.mark.parametrize(
    'platform_binaries, runner_binaries, expected_binaries',
    TESTDATA_9,
    ids=['default', 'valid']
)
def test_projectbuilder_get_binaries(
    mocked_jobserver,
    platform_binaries,
    runner_binaries,
    expected_binaries
):
    """_get_binaries() should merge platform-declared binaries with those
    discovered via runners.yaml (order-insensitive)."""
    def mock_get_domains(*args, **kwargs):
        # No sysbuild domains: only the default build directory is considered.
        return []

    instance_mock = mock.Mock()
    instance_mock.build_dir = os.path.join('build', 'dir')
    instance_mock.domains.get_domains.side_effect = mock_get_domains
    instance_mock.platform = mock.Mock()
    instance_mock.platform.binaries = platform_binaries
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb._get_binaries_from_runners = mock.Mock(return_value=runner_binaries)

    bins = pb._get_binaries()

    # Mutual containment (previously two all() loops shadowing the built-in
    # `bin`) is exactly set equality.
    assert set(bins) == set(expected_binaries)
1826
1827
# TESTDATA_10 entries: (domain, runners_content, expected_binaries)
#   domain           - optional sysbuild domain name prepended to the path
#   runners_content  - parsed runners.yaml content (None -> file missing)
#   expected_binaries - paths expected from _get_binaries_from_runners()
TESTDATA_10 = [
    # 'no file'
    (None, None, []),
    # 'no config'
    (None, {'dummy': 'dummy'}, []),
    # 'valid'
    (   None,
        {
            'config': {
                'elf_file': '/absolute/path/dummy.elf',
                'bin_file': 'path/dummy.bin'
            }
        },
        ['/absolute/path/dummy.elf', os.path.join('zephyr', 'path/dummy.bin')]
    ),
    # 'with domain'
    (   'test_domain',
        {
            'config': {
                'elf_file': '/absolute/path/dummy.elf',
                'bin_file': 'path/dummy.bin'
            }
        },
        ['/absolute/path/dummy.elf', os.path.join('test_domain', 'zephyr', 'path/dummy.bin')]
    ),
]
1850
@pytest.mark.parametrize(
    'domain, runners_content, expected_binaries',
    TESTDATA_10,
    ids=['no file', 'no config', 'valid', 'with domain']
)
def test_projectbuilder_get_binaries_from_runners(
    mocked_jobserver,
    domain,
    runners_content,
    expected_binaries
):
    """_get_binaries_from_runners() should extract elf/bin/hex paths from the
    (optionally domain-scoped) runners.yaml, if that file exists."""
    def mock_exists(fname):
        # The production code must probe runners.yaml at exactly this path.
        assert fname == os.path.join('build', 'dir', domain if domain else '',
                                     'zephyr', 'runners.yaml')
        return runners_content is not None

    instance_mock = mock.Mock()
    instance_mock.build_dir = os.path.join('build', 'dir')
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    with mock.patch('os.path.exists', mock_exists), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('yaml.load', return_value=runners_content):
        if domain:
            bins = pb._get_binaries_from_runners(domain)
        else:
            bins = pb._get_binaries_from_runners()

    # Mutual containment (previously two all() loops shadowing the built-in
    # `bin`) is exactly set equality.
    assert set(bins) == set(expected_binaries)
1883
1884
def test_projectbuilder_sanitize_files(mocked_jobserver):
    """_sanitize_files() should delegate once to each sanitizing helper."""
    pb = ProjectBuilder(mock.Mock(), mock.Mock(), mocked_jobserver)
    pb._sanitize_runners_file = mock.Mock()
    pb._sanitize_zephyr_base_from_files = mock.Mock()

    pb._sanitize_files()

    pb._sanitize_runners_file.assert_called_once()
    pb._sanitize_zephyr_base_from_files.assert_called_once()
1897
1898
1899
# TESTDATA_11 entries: (runners_text, expected_write_text)
#   runners_text        - runners.yaml content read back (None -> no file)
#   expected_write_text - sanitized content written out (None -> no write)
TESTDATA_11 = [
    # 'no file'
    (None, None),
    # 'no config'
    ('dummy: []', None),
    # 'valid': the absolute hex_file under the build dir becomes relative
    (
"""
config:
  elf_file: relative/path/dummy.elf
  hex_file: /absolute/path/build_dir/zephyr/dummy.hex
""",
"""
config:
  elf_file: relative/path/dummy.elf
  hex_file: dummy.hex
"""
    ),
]
1916
@pytest.mark.parametrize(
    'runners_text, expected_write_text',
    TESTDATA_11,
    ids=['no file', 'no config', 'valid']
)
def test_projectbuilder_sanitize_runners_file(
    mocked_jobserver,
    runners_text,
    expected_write_text
):
    """Absolute paths under the build dir in runners.yaml should be
    rewritten as relative ones."""
    def fake_exists(fname):
        # The file "exists" only when the testcase supplies its contents.
        return runners_text is not None

    instance = mock.Mock()
    instance.build_dir = '/absolute/path/build_dir'

    pb = ProjectBuilder(instance, mock.Mock(), mocked_jobserver)

    opener = mock.mock_open(read_data=runners_text)
    with mock.patch('os.path.exists', fake_exists), \
         mock.patch('builtins.open', opener):
        pb._sanitize_runners_file()

    if expected_write_text is None:
        opener().write.assert_not_called()
    else:
        opener().write.assert_called_with(expected_write_text)
1945
1946
# TESTDATA_12 entries: (text_mocks, expected_write_texts)
#   text_mocks          - map of build-dir-relative filename to a mock_open
#                         yielding that file's original content
#   expected_write_texts - map of the same filenames to the content expected
#                          after the ZEPHYR_BASE prefix is stripped
TESTDATA_12 = [
    # 'CMakeCache file'
    (
        {
            'CMakeCache.txt': mock.mock_open(
                read_data='canonical/zephyr/base/dummy.file: ERROR'
            )
        },
        {
            'CMakeCache.txt': 'dummy.file: ERROR'
        }
    ),
    # 'runners.yaml file'
    (
        {
            os.path.join('zephyr', 'runners.yaml'): mock.mock_open(
                read_data='There was canonical/zephyr/base/dummy.file here'
            )
        },
        {
            os.path.join('zephyr', 'runners.yaml'): 'There was dummy.file here'
        }
    ),
]
1969
@pytest.mark.parametrize(
    'text_mocks, expected_write_texts',
    TESTDATA_12,
    ids=['CMakeCache file', 'runners.yaml file']
)
def test_projectbuilder_sanitize_zephyr_base_from_files(
    mocked_jobserver,
    text_mocks,
    expected_write_texts
):
    """ZEPHYR_BASE prefixes should be stripped from build artifact files."""
    build_dir_path = 'canonical/zephyr/base/build_dir/'

    def fake_exists(fname):
        # Only the mocked files inside the build dir "exist".
        return fname.startswith(build_dir_path) \
            and fname[len(build_dir_path):] in text_mocks

    def fake_open(fname, *args, **kwargs):
        if not fname.startswith(build_dir_path):
            raise FileNotFoundError(errno.ENOENT, f'File {fname} not found.')
        return text_mocks[fname[len(build_dir_path):]]()

    instance = mock.Mock()
    instance.build_dir = build_dir_path

    pb = ProjectBuilder(instance, mock.Mock(), mocked_jobserver)

    with mock.patch('os.path.exists', fake_exists), \
         mock.patch('builtins.open', fake_open), \
         mock.patch('twisterlib.runner.canonical_zephyr_base',
                    'canonical/zephyr/base'):
        pb._sanitize_zephyr_base_from_files()

    for fname, handler in text_mocks.items():
        handler().write.assert_called_with(expected_write_texts[fname])
2006
2007
# Test data for report_out():
# (instance status, verbose option, cmake_only option, handler ready + run
#  flag, log fragments expected in the captured log, stdout fragment to
#  expect or None to skip the stdout check)
TESTDATA_13 = [
    (
        TwisterStatus.ERROR, True, True, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               ERROR dummy reason (cmake)'],
        None
    ),
    (
        TwisterStatus.FAIL, False, False, False,
        ['ERROR     dummy platform' \
         '            dummy.testsuite.name' \
         '                               FAILED: dummy reason'],
        'INFO    - Total complete:   20/  25  80%' \
        '  built (not run):    0, filtered:    3, failed:    3, error:    1'
    ),
    (
        TwisterStatus.SKIP, True, False, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               SKIPPED (dummy reason)'],
        None
    ),
    (
        TwisterStatus.FILTER, False, False, False,
        [],
        'INFO    - Total complete:   20/  25  80%' \
        '  built (not run):    0, filtered:    4, failed:    2, error:    1'
    ),
    (
        TwisterStatus.PASS, True, False, True,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               PASSED' \
         ' (dummy handler type: dummy dut, 60.000s <zephyr>)'],
        None
    ),
    (
        TwisterStatus.PASS, True, False, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               PASSED (build <zephyr>)'],
        None
    ),
    (
        'unknown status', False, False, False,
        ['Unknown status = unknown status'],
        'INFO    - Total complete:   20/  25  80%'
        '  built (not run):    0, filtered:    3, failed:    2, error:    1\r'
    )
]
2059
@pytest.mark.parametrize(
    'status, verbose, cmake_only, ready_run, expected_logs, expected_out',
    TESTDATA_13,
    ids=['verbose error cmake only', 'failed', 'verbose skipped', 'filtered',
         'verbose passed ready run', 'verbose passed', 'unknown status']
)
def test_projectbuilder_report_out(
    capfd,
    caplog,
    mocked_jobserver,
    status,
    verbose,
    cmake_only,
    ready_run,
    expected_logs,
    expected_out
):
    """report_out() should log a per-instance result line and (for terse
    runs) print a cumulative progress line, depending on the instance's
    status and the verbosity options.
    """
    instance_mock = mock.Mock()
    instance_mock.handler.type_str = 'dummy handler type'
    instance_mock.handler.seed = 123
    instance_mock.handler.ready = ready_run
    instance_mock.run = ready_run
    instance_mock.dut = 'dummy dut'
    instance_mock.execution_time = 60
    instance_mock.platform.name = 'dummy platform'
    instance_mock.status = status
    instance_mock.reason = 'dummy reason'
    instance_mock.toolchain = 'zephyr'
    instance_mock.testsuite.name = 'dummy.testsuite.name'
    # One skipped and 24 unknown-status testcases to exercise the
    # per-testcase counting paths.
    skip_mock_tc = mock.Mock(status=TwisterStatus.SKIP, reason=None)
    skip_mock_tc.name = 'mocked_testcase_to_skip'
    unknown_mock_tc = mock.Mock(status=mock.Mock(value='dummystatus'), reason=None)
    unknown_mock_tc.name = 'mocked_testcase_unknown'
    instance_mock.testsuite.testcases = [unknown_mock_tc for _ in range(25)]
    instance_mock.testcases = [unknown_mock_tc for _ in range(24)] + \
                              [skip_mock_tc]
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options.verbose = verbose
    pb.options.cmake_only = cmake_only
    pb.options.seed = 123
    pb.log_info_file = mock.Mock()

    # Execution counters as report_out() expects them; the *_increment
    # closures below emulate ExecutionCounter's increment methods on top of
    # the plain mock attributes.
    results_mock = mock.Mock(
        total = 25,
        done = 19,
        passed = 17,
        notrun = 0,
        failed = 2,
        filtered_configs = 3,
        filtered_runtime = 0,
        filtered_static = 0,
        error = 1,
        cases = 0,
        filtered_cases = 0,
        skipped_cases = 4,
        failed_cases = 0,
        error_cases = 0,
        blocked_cases = 0,
        passed_cases = 0,
        none_cases = 0,
        started_cases = 0
    )
    results_mock.iteration = 1
    def results_done_increment(value=1, decrement=False):
        results_mock.done += value * (-1 if decrement else 1)
    results_mock.done_increment = results_done_increment
    def filtered_configs_increment(value=1, decrement=False):
        results_mock.filtered_configs += value * (-1 if decrement else 1)
    results_mock.filtered_configs_increment = filtered_configs_increment
    def filtered_static_increment(value=1, decrement=False):
        results_mock.filtered_static += value * (-1 if decrement else 1)
    results_mock.filtered_static_increment = filtered_static_increment
    def filtered_runtime_increment(value=1, decrement=False):
        results_mock.filtered_runtime += value * (-1 if decrement else 1)
    results_mock.filtered_runtime_increment = filtered_runtime_increment
    def failed_increment(value=1, decrement=False):
        results_mock.failed += value * (-1 if decrement else 1)
    results_mock.failed_increment = failed_increment
    def notrun_increment(value=1, decrement=False):
        results_mock.notrun += value * (-1 if decrement else 1)
    results_mock.notrun_increment = notrun_increment

    pb.report_out(results_mock)

    assert results_mock.cases_increment.call_args_list == [mock.call(25)]

    # Strip 7-bit ANSI C1 escape sequences (colours) and the variable
    # "twister:runner.py:<line>" prefix before matching log fragments.
    trim_actual_log = re.sub(
        r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])',
        '',
        caplog.text
    )
    trim_actual_log = re.sub(r'twister:runner.py:\d+', '', trim_actual_log)

    assert all([log in trim_actual_log for log in expected_logs])

    print(trim_actual_log)
    if expected_out:
        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        # Remove 7b ANSI C1 escape sequences (colours)
        out = re.sub(
            r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])',
            '',
            out
        )

        assert expected_out in out
2171
2172
def test_projectbuilder_cmake_assemble_args():
    """cmake_assemble_args() should merge every source of CMake arguments
    (extra args, handler args, conf/overlay files) into one -D flag list.
    """
    dummy_handler = mock.Mock(ready=True, args=['dummy_handler'])
    out_dir = os.path.join('build', 'dir')

    with mock.patch('os.path.exists', return_value=True):
        assembled = ProjectBuilder.cmake_assemble_args(
            ['CONFIG_FOO=y', 'DUMMY_EXTRA="yes"'],
            dummy_handler,
            ['extrafile1.conf', 'extrafile2.conf'],
            ['extra_overlay_conf'],
            ['overlay1.dtc', 'overlay2.dtc'],
            ['CMAKE1="yes"', 'CMAKE2=n'],
            out_dir
        )

    extra_conf_path = os.path.join(
        'build', 'dir', 'twister', 'testsuite_extra.conf'
    )
    assert assembled == [
        '-DCONFIG_FOO=y',
        '-DCMAKE1="yes"',
        '-DCMAKE2=n',
        '-DDUMMY_EXTRA=yes',
        '-Ddummy_handler',
        '-DCONF_FILE=extrafile1.conf;extrafile2.conf',
        '-DDTC_OVERLAY_FILE=overlay1.dtc;overlay2.dtc',
        f'-DOVERLAY_CONFIG=extra_overlay_conf {extra_conf_path}',
    ]
2203
2204
def test_projectbuilder_cmake(mocked_jobserver):
    """cmake() should assemble the CMake arguments via cmake_assemble_args()
    and pass them, with the filter, to run_cmake(), returning its result.

    Fix: ``mocked_jobserver`` is now requested as a fixture parameter;
    previously the test took no parameters and passed the module-level
    fixture *function* object to ProjectBuilder instead of the mocked
    jobserver value.
    """
    instance_mock = mock.Mock()
    instance_mock.handler = 'dummy handler'
    instance_mock.build_dir = os.path.join('build', 'dir')
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.build_dir = 'build_dir'
    pb.testsuite.extra_args = ['some', 'args']
    pb.testsuite.extra_conf_files = ['some', 'files1']
    pb.testsuite.extra_overlay_confs = ['some', 'files2']
    pb.testsuite.extra_dtc_overlay_files = ['some', 'files3']
    pb.options.extra_args = ['other', 'args']
    pb.cmake_assemble_args = mock.Mock(return_value=['dummy'])
    cmake_res_mock = mock.Mock()
    pb.run_cmake = mock.Mock(return_value=cmake_res_mock)

    res = pb.cmake(['dummy filter'])

    assert res == cmake_res_mock
    # All argument sources must be forwarded to cmake_assemble_args ...
    pb.cmake_assemble_args.assert_called_once_with(
        pb.testsuite.extra_args,
        pb.instance.handler,
        pb.testsuite.extra_conf_files,
        pb.testsuite.extra_overlay_confs,
        pb.testsuite.extra_dtc_overlay_files,
        pb.options.extra_args,
        pb.instance.build_dir
    )
    # ... and its result is what run_cmake receives, with the filter.
    pb.run_cmake.assert_called_once_with(['dummy'], ['dummy filter'])
2235
2236
def test_projectbuilder_build(mocked_jobserver):
    """build() should delegate to run_build() with '--build <build_dir>'
    and return its result unchanged.
    """
    env_mock = mock.Mock()
    instance_mock = mock.Mock()
    instance_mock.testsuite.harness = 'test'

    builder = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    builder.build_dir = 'build_dir'
    build_result = {'dummy': 'dummy'}
    builder.run_build = mock.Mock(return_value=build_result)

    assert builder.build() == build_result
    builder.run_build.assert_called_once_with(['--build', 'build_dir'])
2251
2252
# Test data for ProjectBuilder.run():
# (handler ready, handler type_str, --seed option, platform name,
#  platform arch, defconfig, harness name, then the expected effects:
#  duts passed to handler, parse_generated() called, seed applied,
#  extra test args applied, pytest_run() called, handler.handle() called)
TESTDATA_14 = [
    (
        True,
        'device',
        234,
        'native_sim',
        'posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_SIM': 'y'},
        'pytest',
        True,
        True,
        True,
        True,
        True,
        False
    ),
    (
        True,
        'not device',
        None,
        'native_sim',
        'not posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_SIM': 'y'},
        'not pytest',
        False,
        False,
        False,
        False,
        False,
        True
    ),
    (
        False,
        'device',
        234,
        'native_sim',
        'posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_SIM': 'y'},
        'pytest',
        False,
        False,
        False,
        False,
        False,
        False
    ),
]
2300
@pytest.mark.parametrize(
    'ready, type_str, seed, platform_name, platform_arch, defconfig, harness,' \
    ' expect_duts, expect_parse_generated, expect_seed,' \
    ' expect_extra_test_args, expect_pytest, expect_handle',
    TESTDATA_14,
    ids=['pytest full', 'not pytest minimal', 'not ready']
)
def test_projectbuilder_run(
    mocked_jobserver,
    ready,
    type_str,
    seed,
    platform_name,
    platform_arch,
    defconfig,
    harness,
    expect_duts,
    expect_parse_generated,
    expect_seed,
    expect_extra_test_args,
    expect_pytest,
    expect_handle
):
    """run() should configure the handler (duts, seed, extra test args) and
    dispatch to the proper harness: pytest_run() for the Pytest harness,
    handler.handle() otherwise; nothing happens if the handler is not ready.
    """
    pytest_mock = mock.Mock(spec=Pytest)
    harness_mock = mock.Mock()

    # HarnessImporter.get_harness replacement: return the Pytest-spec'd mock
    # only when the Pytest harness is requested.
    def mock_harness(name):
        if name == 'Pytest':
            return pytest_mock
        else:
            return harness_mock

    instance_mock = mock.Mock()
    instance_mock.handler.get_test_timeout = mock.Mock(return_value=60)
    instance_mock.handler.seed = 123
    instance_mock.handler.ready = ready
    instance_mock.handler.type_str = type_str
    instance_mock.handler.duts = [mock.Mock(name='dummy dut')]
    instance_mock.platform.name = platform_name
    instance_mock.platform.arch = platform_arch
    instance_mock.testsuite.harness = harness
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options.extra_test_args = ['dummy_arg1', 'dummy_arg2']
    pb.duts = ['another dut']
    pb.options.seed = seed
    pb.defconfig = defconfig
    pb.parse_generated = mock.Mock()

    with mock.patch('twisterlib.runner.HarnessImporter.get_harness',
                    mock_harness):
        pb.run()

    if expect_duts:
        assert pb.instance.handler.duts == ['another dut']

    if expect_parse_generated:
        pb.parse_generated.assert_called_once()

    if expect_seed:
        assert pb.instance.handler.seed == seed

    if expect_extra_test_args:
        assert pb.instance.handler.extra_test_args == ['dummy_arg1',
                                                       'dummy_arg2']

    if expect_pytest:
        pytest_mock.pytest_run.assert_called_once_with(60)

    if expect_handle:
        pb.instance.handler.handle.assert_called_once_with(harness_mock)
2373
2374
# Test data for gather_metrics():
# (enable_size_report option, cmake_only option,
#  expect calc_size() to be called, expect metrics to be zeroed)
TESTDATA_15 = [
    (False, False, False, True),
    (True, False, True, False),
    (False, True, False, True),
    (True, True, False, True),
]
2381
@pytest.mark.parametrize(
    'enable_size_report, cmake_only, expect_calc_size, expect_zeroes',
    TESTDATA_15,
    ids=['none', 'size_report', 'cmake', 'size_report+cmake']
)
def test_projectbuilder_gather_metrics(
    mocked_jobserver,
    enable_size_report,
    cmake_only,
    expect_calc_size,
    expect_zeroes
):
    """gather_metrics() should either calculate real size metrics or zero
    them out, depending on the size-report/cmake-only option combination.
    """
    env_mock = mock.Mock()
    instance_mock = mock.Mock()
    instance_mock.metrics = {}

    builder = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    builder.options.enable_size_report = enable_size_report
    builder.options.create_rom_ram_report = False
    builder.options.cmake_only = cmake_only
    builder.calc_size = mock.Mock()

    builder.gather_metrics(instance_mock)

    if expect_calc_size:
        builder.calc_size.assert_called_once()

    if expect_zeroes:
        # Without a real size calculation, every metric is explicitly zeroed.
        expected_zeroed = {
            'used_ram': 0,
            'used_rom': 0,
            'available_rom': 0,
            'available_ram': 0,
            'unrecognized': []
        }
        for metric_name, zero_value in expected_zeroed.items():
            assert instance_mock.metrics[metric_name] == zero_value
2415
2416
# Test data for calc_size():
# (instance status, platform type, expect size warnings to be generated,
#  expect real size calculation, expect metrics zeroed)
TESTDATA_16 = [
    (TwisterStatus.ERROR, mock.ANY, False, False, False),
    (TwisterStatus.FAIL, mock.ANY, False, False, False),
    (TwisterStatus.SKIP, mock.ANY, False, False, False),
    (TwisterStatus.FILTER, 'native', False, False, True),
    (TwisterStatus.PASS, 'qemu', False, False, True),
    (TwisterStatus.FILTER, 'unit', False, False, True),
    (TwisterStatus.FILTER, 'mcu', True, True, False),
    (TwisterStatus.PASS, 'frdm_k64f', False, True, False),
]
2427
@pytest.mark.parametrize(
    'status, platform_type, expect_warnings, expect_calcs, expect_zeroes',
    TESTDATA_16,
    # Fix: parenthesise the conditional suffix so the status always appears
    # in the test id. Previously the whole concatenation was the "if" branch
    # (`a + b if cond else ''`), so the three mock.ANY rows got empty ids.
    ids=[x[0] + ((', ' + x[1]) if x[1] != mock.ANY else '')
         for x in TESTDATA_16]
)
def test_projectbuilder_calc_size(
    status,
    platform_type,
    expect_warnings,
    expect_calcs,
    expect_zeroes
):
    """calc_size() should run a real size calculation for some platform
    types, zero the metrics for simulated ones, and leave them untouched for
    instances with error/fail/skip status.
    """
    size_calc_mock = mock.Mock()

    instance_mock = mock.Mock()
    instance_mock.status = status
    instance_mock.platform.type = platform_type
    instance_mock.metrics = {}
    instance_mock.calculate_sizes = mock.Mock(return_value=size_calc_mock)

    from_buildlog = True

    # calc_size behaves like a static helper here: call it unbound with the
    # instance as the sole subject.
    ProjectBuilder.calc_size(instance_mock, from_buildlog)

    if expect_calcs:
        instance_mock.calculate_sizes.assert_called_once_with(
            from_buildlog=from_buildlog,
            generate_warning=expect_warnings
        )

        assert instance_mock.metrics['used_ram'] == \
               size_calc_mock.get_used_ram()
        assert instance_mock.metrics['used_rom'] == \
               size_calc_mock.get_used_rom()
        assert instance_mock.metrics['available_rom'] == \
               size_calc_mock.get_available_rom()
        assert instance_mock.metrics['available_ram'] == \
               size_calc_mock.get_available_ram()
        assert instance_mock.metrics['unrecognized'] == \
               size_calc_mock.unrecognized_sections()

    if expect_zeroes:
        assert instance_mock.metrics['used_ram'] == 0
        assert instance_mock.metrics['used_rom'] == 0
        assert instance_mock.metrics['available_rom'] == 0
        assert instance_mock.metrics['available_ram'] == 0
        assert instance_mock.metrics['unrecognized'] == []

    if expect_calcs or expect_zeroes:
        assert instance_mock.metrics['handler_time'] == \
               instance_mock.execution_time
    else:
        # Statuses that never built anything must not touch the metrics.
        assert instance_mock.metrics == {}
2481
2482
# Test data for TwisterRunner.run():
# (sys.platform, os.name, option overrides,
#  whether GNUMakeJobClient.from_environ() returns a client,
#  expected job count, expected jobserver class name)
TESTDATA_17 = [
    ('linux', 'posix', {'jobs': 4}, True, 32, 'GNUMakeJobClient'),
    ('linux', 'posix', {'build_only': True}, False, 16, 'GNUMakeJobServer'),
    ('linux', '???', {}, False, 8, 'JobClient'),
    ('linux', '???', {'jobs': 4}, False, 4, 'JobClient'),
]
2489
@pytest.mark.parametrize(
    'platform, os_name, options, jobclient_from_environ, expected_jobs,' \
    ' expected_jobserver',
    TESTDATA_17,
    ids=['GNUMakeJobClient', 'GNUMakeJobServer',
         'JobClient', 'Jobclient+options']
)
def test_twisterrunner_run(
    caplog,
    platform,
    os_name,
    options,
    jobclient_from_environ,
    expected_jobs,
    expected_jobserver
):
    """TwisterRunner.run() should pick the right jobserver implementation
    and job count, run the pipeline, merge metrics back from the done queue
    and reset the error counter.
    """
    # GNUMakeJobClient.from_environ replacement; returns a client only for
    # the scenario that expects one.
    def mock_client_from_environ(jobs):
        if jobclient_from_environ:
            jobclient_mock = mock.Mock(jobs=32)
            jobclient_mock.name = 'GNUMakeJobClient'
            return jobclient_mock
        return None

    instances = {'dummy instance': mock.Mock(metrics={'k': 'v'})}
    suites = [mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.options.retry_failed = 2
    tr.options.retry_interval = 10
    tr.options.retry_build_errors = True
    tr.options.jobs = None
    tr.options.build_only = None
    for k, v in options.items():
        setattr(tr.options, k, v)
    tr.update_counting_before_pipeline = mock.Mock()
    tr.execute = mock.Mock()
    tr.show_brief = mock.Mock()

    gnumakejobserver_mock = mock.Mock()
    gnumakejobserver_mock().name='GNUMakeJobServer'
    jobclient_mock = mock.Mock()
    jobclient_mock().name='JobClient'

    # One finished instance waits on the done queue; its metrics must be
    # merged into the original instance's metrics after the run.
    pipeline_q = queue.LifoQueue()
    done_q = queue.LifoQueue()
    done_instance = mock.Mock(
        metrics={'k2': 'v2'},
        execution_time=30
    )
    done_instance.name='dummy instance'
    done_q.put(done_instance)
    manager_mock = mock.Mock()
    manager_mock().LifoQueue = mock.Mock(
        side_effect=iter([pipeline_q, done_q])
    )

    results_mock = mock.Mock()
    results_mock().error = 1
    results_mock().iteration = 0
    results_mock().failed = 2
    results_mock().total = 9

    def iteration_increment(value=1, decrement=False):
        results_mock().iteration += value * (-1 if decrement else 1)
    results_mock().iteration_increment = iteration_increment

    with mock.patch('twisterlib.runner.ExecutionCounter', results_mock), \
         mock.patch('twisterlib.runner.BaseManager', manager_mock), \
         mock.patch('twisterlib.runner.GNUMakeJobClient.from_environ',
                    mock_client_from_environ), \
         mock.patch('twisterlib.runner.GNUMakeJobServer',
                    gnumakejobserver_mock), \
         mock.patch('twisterlib.runner.JobClient', jobclient_mock), \
         mock.patch('multiprocessing.cpu_count', return_value=8), \
         mock.patch('sys.platform', platform), \
         mock.patch('time.sleep', mock.Mock()), \
         mock.patch('os.name', os_name):
        tr.run()

    assert f'JOBS: {expected_jobs}' in caplog.text

    assert tr.jobserver.name == expected_jobserver

    # Metrics from the done queue instance are merged into the original.
    assert tr.instances['dummy instance'].metrics == {
        'k': 'v',
        'k2': 'v2',
        'handler_time': 30,
        'unrecognized': []
    }

    # The error counter must end up reset.
    assert results_mock().error == 0
2582
2583
def test_twisterrunner_update_counting_before_pipeline():
    """update_counting_before_pipeline() should account, ahead of the
    pipeline, for instances that were statically filtered or errored out,
    including their testcase counts.
    """
    instances = {
        'dummy1': mock.Mock(
            status=TwisterStatus.FILTER,
            reason='runtime filter',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy2': mock.Mock(
            status=TwisterStatus.FILTER,
            reason='static filter',
            testsuite=mock.Mock(
                testcases=[mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock()]
            )
        ),
        'dummy3': mock.Mock(
            status=TwisterStatus.ERROR,
            reason='error',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy4': mock.Mock(
            status=TwisterStatus.PASS,
            reason='OK',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy5': mock.Mock(
            status=TwisterStatus.SKIP,
            reason=None,
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        )
    }
    suites = [mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.results = mock.Mock(
        total = 0,
        done = 0,
        passed = 0,
        failed = 0,
        filtered_configs = 0,
        filtered_runtime = 0,
        filtered_static = 0,
        error = 0,
        cases = 0,
        filtered_cases = 0,
        skipped_cases = 0,
        failed_cases = 0,
        error_cases = 0,
        blocked_cases = 0,
        passed_cases = 0,
        none_cases = 0,
        started_cases = 0
    )
    # The *_increment closures emulate ExecutionCounter's increment methods
    # on top of the plain mock attributes.
    def filtered_configs_increment(value=1, decrement=False):
        tr.results.filtered_configs += value * (-1 if decrement else 1)
    tr.results.filtered_configs_increment = filtered_configs_increment
    def filtered_static_increment(value=1, decrement=False):
        tr.results.filtered_static += value * (-1 if decrement else 1)
    tr.results.filtered_static_increment = filtered_static_increment
    def error_increment(value=1, decrement=False):
        tr.results.error += value * (-1 if decrement else 1)
    tr.results.error_increment = error_increment
    def cases_increment(value=1, decrement=False):
        tr.results.cases += value * (-1 if decrement else 1)
    tr.results.cases_increment = cases_increment
    def filtered_cases_increment(value=1, decrement=False):
        tr.results.filtered_cases += value * (-1 if decrement else 1)
    tr.results.filtered_cases_increment = filtered_cases_increment

    tr.update_counting_before_pipeline()

    # Only the statically filtered instance (dummy2, 4 testcases) and the
    # errored instance (dummy3) are counted before the pipeline runs.
    assert tr.results.filtered_static == 1
    assert tr.results.filtered_configs == 1
    assert tr.results.filtered_cases == 4
    assert tr.results.cases == 4
    assert tr.results.error == 1
2668
2669
def test_twisterrunner_show_brief(caplog):
    """show_brief() should log a one-line summary of selected scenarios,
    configurations and filter counts.
    """
    env_mock = mock.Mock()
    instances = {f'dummy{i}': mock.Mock() for i in range(1, 6)}
    suites = [mock.Mock(), mock.Mock()]

    runner = TwisterRunner(instances, suites, env=env_mock)
    runner.results = mock.Mock(
        filtered_static = 3,
        filtered_configs = 4,
        skipped_cases = 0,
        cases = 0,
        error = 0
    )

    runner.show_brief()

    expected_line = (
        '2 test scenarios (5 configurations) selected,'
        ' 4 configurations filtered (3 by static filter, 1 at runtime).'
    )
    assert expected_line in caplog.text
2696
2697
# Test data for add_tasks_to_queue():
# (build_only, test_only, retry_build_errors,
#  expected tasks put on the pipeline, in order)
TESTDATA_18 = [
    (False, False, False, [{'op': 'cmake', 'test': mock.ANY}]),
    (False, False, True, [{'op': 'filter', 'test': mock.ANY},
                          {'op': 'cmake', 'test': mock.ANY}]),
    (False, True, True, [{'op': 'run', 'test': mock.ANY},
                         {'op': 'run', 'test': mock.ANY}]),
    (False, True, False, [{'op': 'run', 'test': mock.ANY}]),
    (True, True, False, [{'op': 'cmake', 'test': mock.ANY}]),
    (True, True, True, [{'op': 'filter', 'test': mock.ANY},
                        {'op': 'cmake', 'test': mock.ANY}]),
    (True, False, True, [{'op': 'filter', 'test': mock.ANY},
                         {'op': 'cmake', 'test': mock.ANY}]),
    (True, False, False, [{'op': 'cmake', 'test': mock.ANY}]),
]
2712
@pytest.mark.parametrize(
    'build_only, test_only, retry_build_errors, expected_pipeline_elements',
    TESTDATA_18,
    ids=['none', 'retry', 'test+retry', 'test', 'build+test',
         'build+test+retry', 'build+retry', 'build']
)
def test_twisterrunner_add_tasks_to_queue(
    build_only,
    test_only,
    retry_build_errors,
    expected_pipeline_elements
):
    """add_tasks_to_queue() should enqueue the right first operation for
    each instance depending on its status and the build/test/retry options.
    """
    # get_cmake_filter_stages replacement: echo the raw filter back as the
    # single stage so the calls can be asserted on below.
    def mock_get_cmake_filter_stages(filter, keys):
        return [filter]

    instances = {
        'dummy1': mock.Mock(run=True, retries=0, status=TwisterStatus.PASS, build_dir="/tmp"),
        'dummy2': mock.Mock(run=True, retries=0, status=TwisterStatus.SKIP, build_dir="/tmp"),
        'dummy3': mock.Mock(run=True, retries=0, status=TwisterStatus.FILTER, build_dir="/tmp"),
        'dummy4': mock.Mock(run=True, retries=0, status=TwisterStatus.ERROR, build_dir="/tmp"),
        'dummy5': mock.Mock(run=True, retries=0, status=TwisterStatus.FAIL, build_dir="/tmp")
    }
    instances['dummy4'].testsuite.filter = 'some'
    instances['dummy5'].testsuite.filter = 'full'
    suites = [mock.Mock(), mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.get_cmake_filter_stages = mock.Mock(
        side_effect=mock_get_cmake_filter_stages
    )
    tr.results = mock.Mock(iteration=0)

    pipeline_mock = mock.Mock()

    tr.add_tasks_to_queue(
        pipeline_mock,
        build_only,
        test_only,
        retry_build_errors
    )

    # In build-only mode every instance's run flag must have been cleared.
    assert all(
        [build_only != instance.run for instance in instances.values()]
    )

    tr.get_cmake_filter_stages.assert_any_call('full', mock.ANY)
    if retry_build_errors:
        tr.get_cmake_filter_stages.assert_any_call('some', mock.ANY)

    print(pipeline_mock.put.call_args_list)
    print([mock.call(el) for el in expected_pipeline_elements])

    assert pipeline_mock.put.call_args_list == \
           [mock.call(el) for el in expected_pipeline_elements]
2768
2769
# Platforms to exercise pipeline_mgr() on. Plain strings: the original
# parenthesised values were not tuples, just redundantly wrapped strings.
TESTDATA_19 = [
    'linux',
    'nt',
]
2774
@pytest.mark.parametrize(
    'platform',
    TESTDATA_19,
)
def test_twisterrunner_pipeline_mgr(mocked_jobserver, platform):
    """pipeline_mgr() should keep pulling tasks from the pipeline until it
    reports empty, processing each one via a ProjectBuilder; on Linux a job
    slot is taken from the jobserver.
    """
    env_mock = mock.Mock()

    runner = TwisterRunner({}, [], env=env_mock)
    runner.jobserver = mock.Mock(
        get_job=mock.Mock(return_value=nullcontext())
    )

    # Five tasks, then the queue reports empty (a queue.Empty element in a
    # side_effect list is raised by the mock).
    task_stream = [{'test': 'dummy'} for _ in range(5)] + [queue.Empty()]
    pipeline_mock = mock.Mock(
        get_nowait=mock.Mock(side_effect=task_stream)
    )
    done_queue_mock = mock.Mock()
    lock_mock = mock.Mock()
    results_mock = mock.Mock()

    with mock.patch('sys.platform', platform), \
         mock.patch('twisterlib.runner.ProjectBuilder',
                    return_value=mock.Mock()) as pb:
        runner.pipeline_mgr(pipeline_mock, done_queue_mock, lock_mock,
                            results_mock)

    # One ProjectBuilder.process() call per task pulled from the queue.
    assert len(pb().process.call_args_list) == 5

    if platform == 'linux':
        runner.jobserver.get_job.assert_called_once()
2814
2815
def test_twisterrunner_execute(caplog):
    """execute() should start one worker process per job, join them, and on
    KeyboardInterrupt log the interruption and terminate every worker.
    """
    counter = 0
    # join() succeeds three times, then simulates the user interrupting.
    def mock_join():
        nonlocal counter
        counter += 1
        if counter > 3:
            raise KeyboardInterrupt()

    instances = {}
    suites = []
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.add_tasks_to_queue = mock.Mock()
    tr.jobs = 5

    process_mock = mock.Mock()
    process_mock().join = mock.Mock(side_effect=mock_join)
    process_mock().exitcode = 0
    pipeline_mock = mock.Mock()
    done_mock = mock.Mock()

    with mock.patch('twisterlib.runner.Process', process_mock):
        tr.execute(pipeline_mock, done_mock)

    assert 'Execution interrupted' in caplog.text

    # All 5 workers are started and terminated; only 4 joins happen because
    # the 4th join raises KeyboardInterrupt.
    assert len(process_mock().start.call_args_list) == 5
    assert len(process_mock().join.call_args_list) == 4
    assert len(process_mock().terminate.call_args_list) == 5
2846
2847
2848
# Test data for get_cmake_filter_stages():
# (filter expression, expected cmake stages needed to evaluate it)
TESTDATA_20 = [
    ('', []),
    ('not ARCH in ["x86", "arc"]', ['full']),
    ('dt_dummy(x, y)', ['dts']),
    ('not CONFIG_FOO', ['kconfig']),
    ('dt_dummy and CONFIG_FOO', ['dts', 'kconfig']),
]
2856
@pytest.mark.parametrize(
    'filter, expected_result',
    TESTDATA_20,
    ids=['none', 'full', 'dts', 'kconfig', 'dts+kconfig']
)
def test_twisterrunner_get_cmake_filter_stages(filter, expected_result):
    """get_cmake_filter_stages() should map a filter expression onto the
    cmake stages needed to evaluate it (stage order is not significant).
    """
    stages = TwisterRunner.get_cmake_filter_stages(filter, ['not', 'and'])

    # Compare order-insensitively: stage ordering is an implementation detail.
    assert sorted(stages) == sorted(expected_result)
2866