#!/usr/bin/env python3
# Copyright (c) 2023 Google LLC
#
# SPDX-License-Identifier: Apache-2.0
"""
Tests for runner.py classes
"""

import errno
import mock
import os
import pathlib
import pytest
import queue
import re
import subprocess
import sys
import yaml

from contextlib import nullcontext
from elftools.elf.sections import SymbolTableSection
from typing import List

ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))

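# The twisterlib imports below are resolved via the scripts/pylib/twister
# path inserted above, so ZEPHYR_BASE must point at a valid Zephyr tree.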
from twisterlib.error import BuildError
from twisterlib.harness import Pytest

from twisterlib.runner import (
    CMake,
    ExecutionCounter,
    FilterBuilder,
    ProjectBuilder,
    TwisterRunner
)

@pytest.fixture
def mocked_instance(tmp_path):
    instance = mock.Mock()
    testsuite = mock.Mock()
    testsuite.source_dir: str = ''
    instance.testsuite = testsuite
    platform = mock.Mock()
    platform.binaries: List[str] = []
    instance.platform = platform
    build_dir = tmp_path / 'build_dir'
    os.makedirs(build_dir)
    instance.build_dir: str = str(build_dir)
    return instance


@pytest.fixture
def mocked_env():
    env = mock.Mock()
    options = mock.Mock()
    env.options = options
    return env


@pytest.fixture
def mocked_jobserver():
    jobserver = mock.Mock()
    return jobserver


@pytest.fixture
def project_builder(mocked_instance, mocked_env, mocked_jobserver) -> ProjectBuilder:
    project_builder = ProjectBuilder(mocked_instance, mocked_env, mocked_jobserver)
    return project_builder


@pytest.fixture
def runners(project_builder: ProjectBuilder) -> dict:
    """
    Create a runners.yaml file in the build_dir/zephyr directory and return
    its content as a dict.
    """
    build_dir_zephyr_path = os.path.join(project_builder.instance.build_dir, 'zephyr')
    os.makedirs(build_dir_zephyr_path)
    runners_file_path = os.path.join(build_dir_zephyr_path, 'runners.yaml')
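    # Note: 'hex_file' is deliberately an absolute path; the sanitization
    # tests below rely on runners.yaml containing at least one absolute entry.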
    runners_content: dict = {
        'config': {
            'elf_file': 'zephyr.elf',
            'hex_file': os.path.join(build_dir_zephyr_path, 'zephyr.elf'),
            'bin_file': 'zephyr.bin',
        }
    }
    with open(runners_file_path, 'w') as file:
        yaml.dump(runners_content, file)

    return runners_content


@mock.patch("os.path.exists")
def test_projectbuilder_cmake_assemble_args_single(m):
    # Causes the additional_overlay_path to be appended
    m.return_value = True

    class MockHandler:
        pass

    handler = MockHandler()
    handler.args = ["handler_arg1", "handler_arg2"]
    handler.ready = True

    assert(ProjectBuilder.cmake_assemble_args(
        ["basearg1", "CONFIG_t=\"test\"", "SNIPPET_t=\"test\""],
        handler,
        ["a.conf;b.conf", "c.conf"],
        ["extra_overlay.conf"],
        ["x.overlay;y.overlay", "z.overlay"],
        ["cmake1=foo", "cmake2=bar"],
        "/builddir/",
    ) == [
        "-DCONFIG_t=\"test\"",
        "-Dcmake1=foo", "-Dcmake2=bar",
        "-Dbasearg1", "-DSNIPPET_t=test",
        "-Dhandler_arg1", "-Dhandler_arg2",
        "-DCONF_FILE=a.conf;b.conf;c.conf",
        "-DDTC_OVERLAY_FILE=x.overlay;y.overlay;z.overlay",
        "-DOVERLAY_CONFIG=extra_overlay.conf "
        "/builddir/twister/testsuite_extra.conf",
    ])


def test_if_default_binaries_are_taken_properly(project_builder: ProjectBuilder):
    default_binaries = [
        os.path.join('zephyr', 'zephyr.hex'),
        os.path.join('zephyr', 'zephyr.bin'),
        os.path.join('zephyr', 'zephyr.elf'),
        os.path.join('zephyr', 'zephyr.exe'),
    ]
    project_builder.testsuite.sysbuild = False
    binaries = project_builder._get_binaries()
    assert sorted(binaries) == sorted(default_binaries)


def test_if_binaries_from_platform_are_taken_properly(project_builder: ProjectBuilder):
    platform_binaries = ['spi_image.bin']
    project_builder.platform.binaries = platform_binaries
    project_builder.testsuite.sysbuild = False
    platform_binaries_expected = [os.path.join('zephyr', bin) for bin in platform_binaries]
    binaries = project_builder._get_binaries()
    assert sorted(binaries) == sorted(platform_binaries_expected)


def test_if_binaries_from_runners_are_taken_properly(runners, project_builder: ProjectBuilder):
    runners_binaries = list(runners['config'].values())
    runners_binaries_expected = [
        bin if os.path.isabs(bin) else os.path.join('zephyr', bin)
        for bin in runners_binaries
    ]
    binaries = project_builder._get_binaries_from_runners()
    assert sorted(binaries) == sorted(runners_binaries_expected)


def test_if_runners_file_is_sanitized_properly(runners, project_builder: ProjectBuilder):
    runners_file_path = os.path.join(project_builder.instance.build_dir, 'zephyr', 'runners.yaml')
    with open(runners_file_path, 'r') as file:
        unsanitized_runners_content = yaml.safe_load(file)
    unsanitized_runners_binaries = list(unsanitized_runners_content['config'].values())
    abs_paths = [bin for bin in unsanitized_runners_binaries if os.path.isabs(bin)]
    assert len(abs_paths) > 0

    project_builder._sanitize_runners_file()

    with open(runners_file_path, 'r') as file:
        sanitized_runners_content = yaml.safe_load(file)
    sanitized_runners_binaries = list(sanitized_runners_content['config'].values())
    abs_paths = [bin for bin in sanitized_runners_binaries if os.path.isabs(bin)]
    assert len(abs_paths) == 0


def test_if_zephyr_base_is_sanitized_properly(project_builder: ProjectBuilder):
    sanitized_path_expected = os.path.join('sanitized', 'path')
    path_to_sanitize = os.path.join(os.path.realpath(ZEPHYR_BASE), sanitized_path_expected)
    cmakecache_file_path = os.path.join(project_builder.instance.build_dir, 'CMakeCache.txt')
    with open(cmakecache_file_path, 'w') as file:
        file.write(path_to_sanitize)

    project_builder._sanitize_zephyr_base_from_files()

    with open(cmakecache_file_path, 'r') as file:
        sanitized_path = file.read()
    assert sanitized_path == sanitized_path_expected


def test_executioncounter(capfd):
    ec = ExecutionCounter(total=12)

    ec.cases = 25
    ec.skipped_cases = 6
    ec.error = 2
    ec.iteration = 2
    ec.done = 9
    ec.passed = 6
    ec.skipped_configs = 3
    ec.skipped_runtime = 1
    ec.skipped_filter = 2
    ec.failed = 1

    ec.summary()

    out, err = capfd.readouterr()
    sys.stdout.write(out)
    sys.stderr.write(err)

    assert (
        f'--------------------------------\n'
        f'Total test suites: 12\n'
        f'Total test cases: 25\n'
        f'Executed test cases: 19\n'
        f'Skipped test cases: 6\n'
        f'Completed test suites: 9\n'
        f'Passing test suites: 6\n'
        f'Failing test suites: 1\n'
        f'Skipped test suites: 3\n'
        f'Skipped test suites (runtime): 1\n'
        f'Skipped test suites (filter): 2\n'
        f'Errors: 2\n'
        f'--------------------------------'
    ) in out

    assert ec.cases == 25
    assert ec.skipped_cases == 6
    assert ec.error == 2
    assert ec.iteration == 2
    assert ec.done == 9
    assert ec.passed == 6
    assert ec.skipped_configs == 3
    assert ec.skipped_runtime == 1
    assert ec.skipped_filter == 2
    assert ec.failed == 1


def test_cmake_parse_generated(mocked_jobserver):
    testsuite_mock = mock.Mock()
    platform_mock = mock.Mock()
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')

    cmake = CMake(testsuite_mock, platform_mock, source_dir, build_dir,
                  mocked_jobserver)

    result = cmake.parse_generated()

    assert cmake.defconfig == {}
    assert result == {}


TESTDATA_1_1 = [
    ('linux'),
    ('nt')
]
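# Each TESTDATA_1_2 entry, in parametrize order: (return_code,
#  is_instance_run, p_out, expect_returncode, expect_writes, expected_status,
#  expected_reason, expected_change_skip, expected_add_missing).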
TESTDATA_1_2 = [
    (0, False, 'dummy out',
     True, True, 'passed', None, False, True),
    (0, True, '',
     False, False, 'passed', None, False, False),
    (1, True, 'ERROR: region `FLASH\' overflowed by 123 MB',
     True, True, 'skipped', 'FLASH overflow', True, False),
    (1, True, 'Error: Image size (99 B) + trailer (1 B) exceeds requested size',
     True, True, 'skipped', 'imgtool overflow', True, False),
    (1, True, 'mock.ANY',
     True, True, 'error', 'Build failure', False, False)
]

@pytest.mark.parametrize(
    'return_code, is_instance_run, p_out, expect_returncode,' \
    ' expect_writes, expected_status, expected_reason,' \
    ' expected_change_skip, expected_add_missing',
    TESTDATA_1_2,
    ids=['no error, no instance run', 'no error, instance run',
         'error - region overflow', 'error - image size exceed', 'error']
)
@pytest.mark.parametrize('sys_platform', TESTDATA_1_1)
def test_cmake_run_build(
    sys_platform,
    return_code,
    is_instance_run,
    p_out,
    expect_returncode,
    expect_writes,
    expected_status,
    expected_reason,
    expected_change_skip,
    expected_add_missing
):
    process_mock = mock.Mock(
        returncode=return_code,
        communicate=mock.Mock(
            return_value=(p_out.encode(sys.getdefaultencoding()), None)
        )
    )

    def mock_popen(*args, **kwargs):
        return process_mock

    testsuite_mock = mock.Mock()
    platform_mock = mock.Mock()
    platform_mock.name = '<platform name>'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')
    jobserver_mock = mock.Mock(
        popen=mock.Mock(side_effect=mock_popen)
    )
    instance_mock = mock.Mock(add_missing_case_status=mock.Mock())
    instance_mock.build_time = 0
    instance_mock.run = is_instance_run
    instance_mock.status = None
    instance_mock.reason = None

    cmake = CMake(testsuite_mock, platform_mock, source_dir, build_dir,
                  jobserver_mock)
    cmake.cwd = os.path.join('dummy', 'working', 'dir')
    cmake.instance = instance_mock
    cmake.options = mock.Mock()
    cmake.options.overflow_as_errors = False

    cmake_path = os.path.join('dummy', 'cmake')

    popen_mock = mock.Mock(side_effect=mock_popen)
    change_mock = mock.Mock()

    with mock.patch('sys.platform', sys_platform), \
         mock.patch('shutil.which', return_value=cmake_path), \
         mock.patch('twisterlib.runner.change_skip_to_error_if_integration',
                    change_mock), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('subprocess.Popen', popen_mock):
        result = cmake.run_build(args=['arg1', 'arg2'])

    expected_results = {}
    if expect_returncode:
        expected_results['returncode'] = return_code
    if expected_results == {}:
        expected_results = None

    assert expected_results == result

    popen_caller = cmake.jobserver.popen if sys_platform == 'linux' else \
                   popen_mock
    popen_caller.assert_called_once_with(
        [os.path.join('dummy', 'cmake'), 'arg1', 'arg2'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=os.path.join('dummy', 'working', 'dir')
    )

    assert cmake.instance.status == expected_status
    assert cmake.instance.reason == expected_reason

    if expected_change_skip:
        change_mock.assert_called_once()

    if expected_add_missing:
        cmake.instance.add_missing_case_status.assert_called_once_with(
            'skipped', 'Test was built only'
        )


TESTDATA_2_1 = [
    ('linux'),
    ('nt')
]
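# Each TESTDATA_2_2 entry, in parametrize order: (error_warns, f_stages,
#  return_code, is_instance_run, p_out, expect_returncode, expect_filter,
#  expect_writes, expected_status, expected_reason, expected_cmd).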
TESTDATA_2_2 = [
    (True, ['dummy_stage_1', 'ds2'],
     0, False, '',
     True, True, False,
     None, None,
     [os.path.join('dummy', 'cmake'),
      '-B' + os.path.join('build', 'dir'), '-DTC_RUNID=1',
      '-DCONFIG_COMPILER_WARNINGS_AS_ERRORS=y',
      '-DEXTRA_GEN_DEFINES_ARGS=--edtlib-Werror', '-Gdummy_generator',
      '-S' + os.path.join('source', 'dir'),
      'arg1', 'arg2',
      '-DBOARD=<platform name>',
      '-DSNIPPET=dummy snippet 1;ds2',
      '-DMODULES=dummy_stage_1,ds2',
      '-Pzephyr_base/cmake/package_helper.cmake']),
    (False, [],
     1, True, 'ERROR: region `FLASH\' overflowed by 123 MB',
     True, False, True,
     'error', 'Cmake build failure',
     [os.path.join('dummy', 'cmake'),
      '-B' + os.path.join('build', 'dir'), '-DTC_RUNID=1',
      '-DCONFIG_COMPILER_WARNINGS_AS_ERRORS=n',
      '-DEXTRA_GEN_DEFINES_ARGS=', '-Gdummy_generator',
      '-Szephyr_base/share/sysbuild',
      '-DAPP_DIR=' + os.path.join('source', 'dir'),
      'arg1', 'arg2',
      '-DBOARD=<platform name>',
      '-DSNIPPET=dummy snippet 1;ds2']),
]

@pytest.mark.parametrize(
    'error_warns, f_stages,' \
    ' return_code, is_instance_run, p_out, expect_returncode,' \
    ' expect_filter, expect_writes, expected_status, expected_reason,' \
    ' expected_cmd',
    TESTDATA_2_2,
    ids=['filter_stages with success', 'no stages with error']
)
@pytest.mark.parametrize('sys_platform', TESTDATA_2_1)
def test_cmake_run_cmake(
    sys_platform,
    error_warns,
    f_stages,
    return_code,
    is_instance_run,
    p_out,
    expect_returncode,
    expect_filter,
    expect_writes,
    expected_status,
    expected_reason,
    expected_cmd
):
    process_mock = mock.Mock(
        returncode=return_code,
        communicate=mock.Mock(
            return_value=(p_out.encode(sys.getdefaultencoding()), None)
        )
    )

    def mock_popen(*args, **kwargs):
        return process_mock

    testsuite_mock = mock.Mock()
    testsuite_mock.sysbuild = True
    platform_mock = mock.Mock()
    platform_mock.name = '<platform name>'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')
    jobserver_mock = mock.Mock(
        popen=mock.Mock(side_effect=mock_popen)
    )
    instance_mock = mock.Mock(add_missing_case_status=mock.Mock())
    instance_mock.run = is_instance_run
    instance_mock.run_id = 1
    instance_mock.build_time = 0
    instance_mock.status = None
    instance_mock.reason = None
    instance_mock.testsuite = mock.Mock()
    instance_mock.testsuite.required_snippets = ['dummy snippet 1', 'ds2']
    instance_mock.testcases = [mock.Mock(), mock.Mock()]
    instance_mock.testcases[0].status = None
    instance_mock.testcases[1].status = None

    cmake = CMake(testsuite_mock, platform_mock, source_dir, build_dir,
                  jobserver_mock)
    cmake.cwd = os.path.join('dummy', 'working', 'dir')
    cmake.instance = instance_mock
    cmake.options = mock.Mock()
    cmake.options.disable_warnings_as_errors = not error_warns
    cmake.options.overflow_as_errors = False
    cmake.env = mock.Mock()
    cmake.env.generator = 'dummy_generator'

    cmake_path = os.path.join('dummy', 'cmake')

    popen_mock = mock.Mock(side_effect=mock_popen)
    change_mock = mock.Mock()

    with mock.patch('sys.platform', sys_platform), \
         mock.patch('shutil.which', return_value=cmake_path), \
         mock.patch('twisterlib.runner.change_skip_to_error_if_integration',
                    change_mock), \
         mock.patch('twisterlib.runner.canonical_zephyr_base',
                    'zephyr_base'), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('subprocess.Popen', popen_mock):
        result = cmake.run_cmake(args=['arg1', 'arg2'], filter_stages=f_stages)

    expected_results = {}
    if expect_returncode:
        expected_results['returncode'] = return_code
    if expect_filter:
        expected_results['filter'] = {}
    if expected_results == {}:
        expected_results = None

    assert expected_results == result

    popen_caller = cmake.jobserver.popen if sys_platform == 'linux' else \
                   popen_mock
    popen_caller.assert_called_once_with(
        expected_cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=os.path.join('dummy', 'working', 'dir')
    )

    assert cmake.instance.status == expected_status
    assert cmake.instance.reason == expected_reason

    for tc in cmake.instance.testcases:
        assert tc.status == cmake.instance.status


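# Each TESTDATA_3 entry, in parametrize order: (platform_name, filter_stages,
#  sysbuild, do_find_cache, west_flash_options, edt_exists, parse_results,
#  testsuite_filter, expected_defconfig_path, expected_edt_pickle_path,
#  expected_defconfig, expected_cmakecache, expected_filter_data,
#  expected_edt, expected_logs, expected_return).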
TESTDATA_3 = [
    ('unit_testing', [], False, True, None, True, None, True,
     None, None, {}, {}, None, None, [], {}),
    (
        'other', [], True,
        True, ['dummy', 'west', 'options'], True,
        None, True,
        os.path.join('domain', 'build', 'dir', 'zephyr', '.config'),
        os.path.join('domain', 'build', 'dir', 'zephyr', 'edt.pickle'),
        {'CONFIG_FOO': 'no'},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'CONFIG_FOO': 'no', 'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [f'Loaded sysbuild domain data from' \
         f' {os.path.join("build", "dir", "domains.yaml")}'],
        {os.path.join('other', 'dummy.testsuite.name'): True}
    ),
    (
        'other', ['kconfig'], True,
        True, ['dummy', 'west', 'options'], True,
        'Dummy parse results', True,
        os.path.join('build', 'dir', 'zephyr', '.config'),
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {'CONFIG_FOO': 'no'},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'CONFIG_FOO': 'no', 'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], False,
        False, None, True,
        'Dummy parse results', True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], True,
        False, None, True,
        'Dummy parse results', True,
        None,
        None,
        {},
        {},
        {},
        None,
        ['Sysbuild test will be skipped. West must be used for flashing.'],
        {os.path.join('other', 'dummy.testsuite.name'): True}
    ),
    (
        'other', ['other'], True,
        False, ['--erase'], True,
        'Dummy parse results', True,
        None,
        None,
        {},
        {},
        None,
        b'dummy edt pickle contents',
        ['Sysbuild test will be skipped,' \
         ' --erase is not supported with --west-flash'],
        {os.path.join('other', 'dummy.testsuite.name'): True}
    ),
    (
        'other', ['other'], False,
        True, None, False,
        'Dummy parse results', True,
        None,
        None,
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        None,
        [],
        {os.path.join('other', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        'Dummy parse results', True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        None, True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'dummy.testsuite.name'): True}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        'Dummy parse results', False,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        SyntaxError, True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        ['Failed processing testsuite.yaml'],
        SyntaxError
    ),
]

@pytest.mark.parametrize(
    'platform_name, filter_stages, sysbuild,' \
    ' do_find_cache, west_flash_options, edt_exists,' \
    ' parse_results, testsuite_filter,' \
    ' expected_defconfig_path, expected_edt_pickle_path,' \
    ' expected_defconfig, expected_cmakecache, expected_filter_data,' \
    ' expected_edt,' \
    ' expected_logs, expected_return',
    TESTDATA_3,
    ids=['unit testing', 'domain', 'kconfig', 'no cache',
         'no west options', 'erase west flash option', 'no edt',
         'parse result', 'no parse result', 'no testsuite filter', 'parse err']
)
def test_filterbuilder_parse_generated(
    caplog,
    mocked_jobserver,
    platform_name,
    filter_stages,
    sysbuild,
    do_find_cache,
    west_flash_options,
    edt_exists,
    parse_results,
    testsuite_filter,
    expected_defconfig_path,
    expected_edt_pickle_path,
    expected_defconfig,
    expected_cmakecache,
    expected_filter_data,
    expected_edt,
    expected_logs,
    expected_return
):
    def mock_domains_from_file(*args, **kwargs):
        dom = mock.Mock()
        dom.build_dir = os.path.join('domain', 'build', 'dir')
        res = mock.Mock(get_default_domain=mock.Mock(return_value=dom))
        return res

    def mock_cmakecache_from_file(*args, **kwargs):
        if not do_find_cache:
            raise FileNotFoundError(errno.ENOENT, 'Cache not found')
        cache_elem = mock.Mock()
        cache_elem.name = 'dummy cache elem'
        cache_elem.value = 1
        cache = [cache_elem]
        return cache

    def mock_open(filepath, type, *args, **kwargs):
        if filepath == expected_defconfig_path:
            rd = 'I am not a proper line\n' \
                 'CONFIG_FOO="no"'
        elif filepath == expected_edt_pickle_path:
            rd = b'dummy edt pickle contents'
        else:
            raise FileNotFoundError(errno.ENOENT,
                                    f'File {filepath} not mocked.')
        return mock.mock_open(read_data=rd)()

    def mock_parser(filter, filter_data, edt):
        assert filter_data == expected_filter_data
        if isinstance(parse_results, type) and \
           issubclass(parse_results, Exception):
            raise parse_results
        return parse_results

    def mock_pickle(datafile):
        assert datafile.read() == expected_edt
        return mock.Mock()

    testsuite_mock = mock.Mock()
    testsuite_mock.sysbuild = 'sysbuild' if sysbuild else None
    testsuite_mock.name = 'dummy.testsuite.name'
    testsuite_mock.filter = testsuite_filter
    platform_mock = mock.Mock()
    platform_mock.name = platform_name
    platform_mock.arch = 'dummy arch'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')

    fb = FilterBuilder(testsuite_mock, platform_mock, source_dir, build_dir,
                       mocked_jobserver)
    instance_mock = mock.Mock()
    fb.instance = instance_mock
    fb.env = mock.Mock()
    fb.env.options = mock.Mock()
    fb.env.options.west_flash = west_flash_options
    fb.env.options.device_testing = True

    environ_mock = {'env_dummy': True}

    with mock.patch('twisterlib.runner.Domains.from_file',
                    mock_domains_from_file), \
         mock.patch('twisterlib.runner.CMakeCache.from_file',
                    mock_cmakecache_from_file), \
         mock.patch('builtins.open', mock_open), \
         mock.patch('expr_parser.parse', mock_parser), \
         mock.patch('pickle.load', mock_pickle), \
         mock.patch('os.path.exists', return_value=edt_exists), \
         mock.patch('os.environ', environ_mock), \
         pytest.raises(expected_return) if \
             isinstance(parse_results, type) and \
             issubclass(parse_results, Exception) else nullcontext() as err:
        result = fb.parse_generated(filter_stages)

    if err:
        assert True
        return

    assert all([log in caplog.text for log in expected_logs])

    assert fb.defconfig == expected_defconfig

    assert fb.cmake_cache == expected_cmakecache

    assert result == expected_return


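# Each TESTDATA_4 entry: (inline_logs, read_exception, expected_logs).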
TESTDATA_4 = [
    (False, False, [f"see: {os.path.join('dummy', 'path', 'dummy_file.log')}"]),
    (True, False, [os.path.join('dummy', 'path', 'dummy_file.log'),
                    'file contents',
                    os.path.join('dummy', 'path', 'dummy_file.log')]),
    (True, True, [os.path.join('dummy', 'path', 'dummy_file.log'),
                   'Unable to read log data ([Errno 2] ERROR: dummy_file.log)',
                   os.path.join('dummy', 'path', 'dummy_file.log')]),
]

@pytest.mark.parametrize(
    'inline_logs, read_exception, expected_logs',
    TESTDATA_4,
    ids=['basic', 'inline logs', 'inline logs+read_exception']
)
def test_projectbuilder_log_info(
    caplog,
    mocked_jobserver,
    inline_logs,
    read_exception,
    expected_logs
):
    def mock_open(filename, *args, **kwargs):
        if read_exception:
            raise OSError(errno.ENOENT, f'ERROR: {os.path.basename(filename)}')
        return mock.mock_open(read_data='file contents')()

    def mock_realpath(filename, *args, **kwargs):
        return os.path.join('path', filename)

    def mock_abspath(filename, *args, **kwargs):
        return os.path.join('dummy', filename)

    filename = 'dummy_file.log'

    env_mock = mock.Mock()
    instance_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    with mock.patch('builtins.open', mock_open), \
         mock.patch('os.path.realpath', mock_realpath), \
         mock.patch('os.path.abspath', mock_abspath):
        pb.log_info(filename, inline_logs)

    assert all([log in caplog.text for log in expected_logs])


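# Each TESTDATA_5 entry: (valgrind_log_exists, handler_log_exists,
#  device_log_exists, instance_reason, handler_log_getsize,
#  device_log_getsize, expected_log).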
TESTDATA_5 = [
    (True, False, False, "Valgrind error", 0, 0, 'build_dir/valgrind.log'),
    (True, False, False, "Error", 0, 0, 'build_dir/build.log'),
    (False, True, False, None, 1024, 0, 'build_dir/handler.log'),
    (False, True, False, None, 0, 0, 'build_dir/build.log'),
    (False, False, True, None, 0, 1024, 'build_dir/device.log'),
    (False, False, True, None, 0, 0, 'build_dir/build.log'),
    (False, False, False, None, 0, 0, 'build_dir/build.log'),
]

@pytest.mark.parametrize(
    'valgrind_log_exists, handler_log_exists, device_log_exists,' \
    ' instance_reason, handler_log_getsize, device_log_getsize, expected_log',
    TESTDATA_5,
    ids=['valgrind log', 'valgrind log unused',
         'handler log', 'handler log unused',
         'device log', 'device log unused',
         'no logs']
)
def test_projectbuilder_log_info_file(
    caplog,
    mocked_jobserver,
    valgrind_log_exists,
    handler_log_exists,
    device_log_exists,
    instance_reason,
    handler_log_getsize,
    device_log_getsize,
    expected_log
):
    def mock_exists(filename, *args, **kwargs):
        if filename == 'build_dir/handler.log':
            return handler_log_exists
        if filename == 'build_dir/valgrind.log':
            return valgrind_log_exists
        if filename == 'build_dir/device.log':
            return device_log_exists
        return False

    def mock_getsize(filename, *args, **kwargs):
        if filename == 'build_dir/handler.log':
            return handler_log_getsize
        if filename == 'build_dir/device.log':
            return device_log_getsize
        return 0

    env_mock = mock.Mock()
    instance_mock = mock.Mock()
    instance_mock.reason = instance_reason
    instance_mock.build_dir = 'build_dir'

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    log_info_mock = mock.Mock()

    with mock.patch('os.path.exists', mock_exists), \
         mock.patch('os.path.getsize', mock_getsize), \
         mock.patch('twisterlib.runner.ProjectBuilder.log_info', log_info_mock):
        pb.log_info_file(None)

    log_info_mock.assert_called_with(expected_log, mock.ANY)


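# Each TESTDATA_6 entry, in parametrize order: (message, instance_status,
#  instance_reason, instance_run, instance_handler_ready, options_cmake_only,
#  options_coverage, options_prep_artifacts, options_runtime_artifacts,
#  cmake_res, build_res, pipeline_runtime_error,
#  determine_testcases_build_error, expected_logs, resulting_message,
#  expected_status, expected_reason, expected_skipped, expected_missing).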
TESTDATA_6 = [
    (
        {'op': 'filter'},
        'failed',
        'Failed',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        'failed',
        'Failed',
        0,
        None
    ),
    (
        {'op': 'filter'},
        'passed',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': { 'dummy instance name': True }},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['filtering dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        'filtered',
        'runtime filter',
        1,
        ('skipped',)
    ),
    (
        {'op': 'filter'},
        'passed',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': { 'another dummy instance name': True }},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cmake', 'test': mock.ANY},
        'passed',
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'cmake'},
        'error',
        'dummy error',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        'error',
        'dummy error',
        0,
        None
    ),
    (
        {'op': 'cmake'},
        None,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        'passed',
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        'success',
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': {'dummy instance name': True}},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['filtering dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        'filtered',
        'runtime filter',
        1,
        ('skipped',)
    ),
    (
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': {}},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'build', 'test': mock.ANY},
        'success',
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'build'},
        mock.ANY,
        None,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        None,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        'error',
        'Build Failure',
        0,
        None
    ),
    (
        {'op': 'build'},
        'skipped',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'gather_metrics', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        1,
        ('skipped', mock.ANY)
    ),
    (
        {'op': 'build'},
        'passed',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'dummy': 'dummy'},
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        'passed',
        mock.ANY,
        0,
        ('blocked', mock.ANY)
    ),
    (
        {'op': 'build'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'gather_metrics', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'build'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        BuildError,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        'error',
        'Determine Testcases Error!',
        0,
        None
    ),
    (
        {'op': 'gather_metrics'},
        mock.ANY,
        mock.ANY,
        True,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'run', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'gather_metrics'},
        mock.ANY,
        mock.ANY,
        False,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'run'},
        'success',
        'OK',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        None,
        mock.ANY,
        ['run test: dummy instance name',
         'run status: dummy instance name success'],
        {'op': 'report', 'test': mock.ANY, 'status': 'success', 'reason': 'OK'},
        'success',
        'OK',
        0,
        None
    ),
    (
        {'op': 'run'},
        'failed',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        RuntimeError,
        mock.ANY,
        ['run test: dummy instance name',
         'run status: dummy instance name failed',
         'RuntimeError: Pipeline Error!'],
        None,
        'failed',
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'device', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'report'},
        'passed',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'pass',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'passed', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'all',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'all', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'other',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'cleanup', 'mode': 'device'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'cleanup', 'mode': 'passed'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'cleanup', 'mode': 'all'},
        mock.ANY,
        'Valgrind error',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (
        {'op': 'cleanup', 'mode': 'all'},
        mock.ANY,
        'Cmake build failure',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
]

@pytest.mark.parametrize(
    'message,' \
    ' instance_status, instance_reason, instance_run, instance_handler_ready,' \
    ' options_cmake_only,' \
    ' options_coverage, options_prep_artifacts, options_runtime_artifacts,' \
    ' cmake_res, build_res,' \
    ' pipeline_runtime_error, determine_testcases_build_error,' \
    ' expected_logs, resulting_message,' \
    ' expected_status, expected_reason, expected_skipped, expected_missing',
    TESTDATA_6,
    ids=[
        'filter, failed', 'filter, cmake res', 'filter, no cmake res',
        'cmake, failed', 'cmake, cmake_only, no status', 'cmake, cmake_only',
        'cmake, no cmake_only, cmake res', 'cmake, no cmake_only, no cmake res',
        'build, no build res', 'build, skipped', 'build, blocked',
        'build, determine testcases', 'build, determine testcases Error',
        'gather metrics, run and ready handler', 'gather metrics',
        'run', 'run, Pipeline Runtime Error',
        'report, prep artifacts for testing',
        'report, runtime artifact cleanup pass, status passed',
        'report, runtime artifact cleanup all', 'report, no message put',
        'cleanup, device', 'cleanup, mode passed', 'cleanup, mode all',
        'cleanup, mode all, cmake build failure'
    ]
)
def test_projectbuilder_process(
    caplog,
    mocked_jobserver,
    message,
    instance_status,
    instance_reason,
    instance_run,
    instance_handler_ready,
    options_cmake_only,
    options_coverage,
    options_prep_artifacts,
    options_runtime_artifacts,
    cmake_res,
    build_res,
    pipeline_runtime_error,
    determine_testcases_build_error,
    expected_logs,
    resulting_message,
    expected_status,
    expected_reason,
    expected_skipped,
    expected_missing
):
    def mock_pipeline_put(msg):
        if isinstance(pipeline_runtime_error, type) and \
           issubclass(pipeline_runtime_error, Exception):
            raise RuntimeError('Pipeline Error!')

    def mock_determine_testcases(res):
        if isinstance(determine_testcases_build_error, type) and \
           issubclass(determine_testcases_build_error, Exception):
            raise BuildError('Determine Testcases Error!')

    instance_mock = mock.Mock()
    instance_mock.name = 'dummy instance name'
    instance_mock.status = instance_status
    instance_mock.reason = instance_reason
    instance_mock.run = instance_run
    instance_mock.handler = mock.Mock()
    instance_mock.handler.ready = instance_handler_ready
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options = mock.Mock()
    pb.options.coverage = options_coverage
    pb.options.prep_artifacts_for_testing = options_prep_artifacts
    pb.options.runtime_artifact_cleanup = options_runtime_artifacts
    pb.options.cmake_only = options_cmake_only

    pb.cmake = mock.Mock(return_value=cmake_res)
    pb.build = mock.Mock(return_value=build_res)
    pb.determine_testcases = mock.Mock(side_effect=mock_determine_testcases)

    pb.report_out = mock.Mock()
    pb.cleanup_artifacts = mock.Mock()
    pb.cleanup_device_testing_artifacts = mock.Mock()
    pb.run = mock.Mock()
    pb.gather_metrics = mock.Mock()

    pipeline_mock = mock.Mock(put=mock.Mock(side_effect=mock_pipeline_put))
    done_mock = mock.Mock()
    lock_mock = mock.Mock(
        __enter__=mock.Mock(return_value=(mock.Mock(), mock.Mock())),
        __exit__=mock.Mock(return_value=None)
    )
    results_mock = mock.Mock()
    results_mock.skipped_runtime = 0

    pb.process(pipeline_mock, done_mock, message, lock_mock, results_mock)

    assert all([log in caplog.text for log in expected_logs])

    if resulting_message:
        pipeline_mock.put.assert_called_with(resulting_message)

    assert pb.instance.status == expected_status
    assert pb.instance.reason == expected_reason
    assert results_mock.skipped_runtime == expected_skipped

    if expected_missing:
        pb.instance.add_missing_case_status.assert_called_with(*expected_missing)


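# Each TESTDATA_7 entry: (symbols_names, added_tcs).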
TESTDATA_7 = [
    (
        [
            'z_ztest_unit_test__dummy_suite_name__dummy_test_name',
            'z_ztest_unit_test__dummy_suite_name__test_dummy_name',
            'no match'
        ],
        ['dummy_id.dummy_name', 'dummy_id.dummy_name']
    ),
    (
        ['no match'],
        []
    ),
]

@pytest.mark.parametrize(
    'symbols_names, added_tcs',
    TESTDATA_7,
    ids=['two hits, one miss', 'nothing']
)
def test_projectbuilder_determine_testcases(
    mocked_jobserver,
    symbols_names,
    added_tcs
):
    symbols_mock = [mock.Mock(n=name) for name in symbols_names]
    for m in symbols_mock:
        m.configure_mock(name=m.n)

    sections_mock = [mock.Mock(spec=SymbolTableSection)]
    sections_mock[0].iter_symbols = mock.Mock(return_value=symbols_mock)

    elf_mock = mock.Mock()
    elf_mock().iter_sections = mock.Mock(return_value=sections_mock)

    results_mock = mock.Mock()

    instance_mock = mock.Mock()
    instance_mock.testcases = []
    instance_mock.testsuite.id = 'dummy_id'
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    with mock.patch('twisterlib.runner.ELFFile', elf_mock), \
         mock.patch('builtins.open', mock.mock_open()):
        pb.determine_testcases(results_mock)

    pb.instance.add_testcase.assert_has_calls(
        [mock.call(name=x) for x in added_tcs]
    )
    pb.instance.testsuite.add_testcase.assert_has_calls(
        [mock.call(name=x) for x in added_tcs]
    )


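# Each TESTDATA_8 entry: (additional_keep, runtime_artifact_cleanup,
#  expected_files).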
TESTDATA_8 = [
    (
        ['addition.al'],
        'dummy',
        ['addition.al', '.config', 'zephyr']
    ),
    (
        [],
        'all',
        ['.config', 'zephyr', 'testsuite_extra.conf', 'twister']
    ),
]

@pytest.mark.parametrize(
    'additional_keep, runtime_artifact_cleanup, expected_files',
    TESTDATA_8,
    ids=['additional keep', 'all cleanup']
)
def test_projectbuilder_cleanup_artifacts(
    tmpdir,
    mocked_jobserver,
    additional_keep,
    runtime_artifact_cleanup,
    expected_files
):
    # tmpdir
    # ┣ twister
    # ┃ ┗ testsuite_extra.conf
    # ┣ dummy_dir
    # ┃ ┗ dummy.del
    # ┣ dummy_link_dir -> zephyr
    # ┣ zephyr
    # ┃ ┗ .config
    # ┗ addition.al
    twister_dir = tmpdir.mkdir('twister')
    testsuite_extra_conf = twister_dir.join('testsuite_extra.conf')
    testsuite_extra_conf.write_text('dummy', 'utf-8')

    dummy_dir = tmpdir.mkdir('dummy_dir')
    dummy_del = dummy_dir.join('dummy.del')
    dummy_del.write_text('dummy', 'utf-8')

    zephyr = tmpdir.mkdir('zephyr')
    config = zephyr.join('.config')
    config.write_text('dummy', 'utf-8')

    dummy_link_dir = tmpdir.join('dummy_link_dir')
    os.symlink(zephyr, dummy_link_dir)

    addition_al = tmpdir.join('addition.al')
    addition_al.write_text('dummy', 'utf-8')

    instance_mock = mock.Mock()
    instance_mock.build_dir = tmpdir
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options = mock.Mock(runtime_artifact_cleanup=runtime_artifact_cleanup)

    pb.cleanup_artifacts(additional_keep)

    files_left = [p.name for p in list(pathlib.Path(tmpdir).glob('**/*'))]

    assert sorted(files_left) == sorted(expected_files)


def test_projectbuilder_cleanup_device_testing_artifacts(
    caplog,
    mocked_jobserver
):
    bins = [os.path.join('zephyr', 'file.bin')]

    instance_mock = mock.Mock()
    instance_mock.testsuite.sysbuild = False
    build_dir = os.path.join('build', 'dir')
    instance_mock.build_dir = build_dir
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb._get_binaries = mock.Mock(return_value=bins)
    pb.cleanup_artifacts = mock.Mock()
    pb._sanitize_files = mock.Mock()

    pb.cleanup_device_testing_artifacts()

    assert f'Cleaning up for Device Testing {build_dir}' in caplog.text

    pb.cleanup_artifacts.assert_called_once_with(
        [os.path.join('zephyr', 'file.bin'),
         os.path.join('zephyr', 'runners.yaml')]
    )
    pb._sanitize_files.assert_called_once()


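# Each TESTDATA_9 entry: (platform_binaries, runner_binaries,
#  expected_binaries).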
TESTDATA_9 = [
    (
        None,
        [],
        [os.path.join('zephyr', 'zephyr.hex'),
         os.path.join('zephyr', 'zephyr.bin'),
         os.path.join('zephyr', 'zephyr.elf'),
         os.path.join('zephyr', 'zephyr.exe')]
    ),
    (
        [os.path.join('dummy.bin'), os.path.join('dummy.hex')],
        [os.path.join('dir2', 'dummy.elf')],
        [os.path.join('zephyr', 'dummy.bin'),
         os.path.join('zephyr', 'dummy.hex'),
         os.path.join('dir2', 'dummy.elf')]
    ),
]

@pytest.mark.parametrize(
    'platform_binaries, runner_binaries, expected_binaries',
    TESTDATA_9,
    ids=['default', 'valid']
)
def test_projectbuilder_get_binaries(
    mocked_jobserver,
    platform_binaries,
    runner_binaries,
    expected_binaries
):
    def mock_get_domains(*args, **kwargs):
        return []

    instance_mock = mock.Mock()
    instance_mock.build_dir = os.path.join('build', 'dir')
    instance_mock.domains.get_domains.side_effect = mock_get_domains
    instance_mock.platform = mock.Mock()
    instance_mock.platform.binaries = platform_binaries
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb._get_binaries_from_runners = mock.Mock(return_value=runner_binaries)

    bins = pb._get_binaries()

    assert all(bin in expected_binaries for bin in bins)
    assert all(bin in bins for bin in expected_binaries)


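# Each TESTDATA_10 entry: (domain, runners_content, expected_binaries).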
TESTDATA_10 = [
    (None, None, []),
    (None, {'dummy': 'dummy'}, []),
    (   None,
        {
            'config': {
                'elf_file': '/absolute/path/dummy.elf',
                'bin_file': 'path/dummy.bin'
            }
        },
        ['/absolute/path/dummy.elf', os.path.join('zephyr', 'path/dummy.bin')]
    ),
    (   'test_domain',
        {
            'config': {
                'elf_file': '/absolute/path/dummy.elf',
                'bin_file': 'path/dummy.bin'
            }
        },
        ['/absolute/path/dummy.elf', os.path.join('test_domain', 'zephyr', 'path/dummy.bin')]
    ),
]

@pytest.mark.parametrize(
    'domain, runners_content, expected_binaries',
    TESTDATA_10,
    ids=['no file', 'no config', 'valid', 'with domain']
)
def test_projectbuilder_get_binaries_from_runners(
    mocked_jobserver,
    domain,
    runners_content,
    expected_binaries
):
    def mock_exists(fname):
        assert fname == os.path.join('build', 'dir', domain if domain else '',
                                     'zephyr', 'runners.yaml')
        return runners_content is not None

    instance_mock = mock.Mock()
    instance_mock.build_dir = os.path.join('build', 'dir')
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    with mock.patch('os.path.exists', mock_exists), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('yaml.safe_load', return_value=runners_content):
        if domain:
            bins = pb._get_binaries_from_runners(domain)
        else:
            bins = pb._get_binaries_from_runners()

    assert all(bin in expected_binaries for bin in bins)
    assert all(bin in bins for bin in expected_binaries)


def test_projectbuilder_sanitize_files(mocked_jobserver):
    instance_mock = mock.Mock()
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb._sanitize_runners_file = mock.Mock()
    pb._sanitize_zephyr_base_from_files = mock.Mock()

    pb._sanitize_files()

    pb._sanitize_runners_file.assert_called_once()
    pb._sanitize_zephyr_base_from_files.assert_called_once()


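# Each TESTDATA_11 entry: (runners_text, expected_write_text).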
TESTDATA_11 = [
    (None, None),
    ('dummy: []', None),
    (
"""
config:
  elf_file: relative/path/dummy.elf
  hex_file: /absolute/path/build_dir/zephyr/dummy.hex
""",
"""
config:
  elf_file: relative/path/dummy.elf
  hex_file: dummy.hex
"""
    ),
]

@pytest.mark.parametrize(
    'runners_text, expected_write_text',
    TESTDATA_11,
    ids=['no file', 'no config', 'valid']
)
def test_projectbuilder_sanitize_runners_file(
    mocked_jobserver,
    runners_text,
    expected_write_text
):
    def mock_exists(fname):
        return runners_text is not None

    instance_mock = mock.Mock()
    instance_mock.build_dir = '/absolute/path/build_dir'
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    with mock.patch('os.path.exists', mock_exists), \
         mock.patch('builtins.open',
                    mock.mock_open(read_data=runners_text)) as f:
        pb._sanitize_runners_file()

    if expected_write_text is not None:
        f().write.assert_called_with(expected_write_text)
    else:
        f().write.assert_not_called()


TESTDATA_12 = [
    (
        {
            'CMakeCache.txt': mock.mock_open(
                read_data='canonical/zephyr/base/dummy.file: ERROR'
            )
        },
        {
            'CMakeCache.txt': 'dummy.file: ERROR'
        }
    ),
    (
        {
            os.path.join('zephyr', 'runners.yaml'): mock.mock_open(
                read_data='There was canonical/zephyr/base/dummy.file here'
            )
        },
        {
            os.path.join('zephyr', 'runners.yaml'): 'There was dummy.file here'
        }
    ),
]

@pytest.mark.parametrize(
    'text_mocks, expected_write_texts',
    TESTDATA_12,
    ids=['CMakeCache file', 'runners.yaml file']
)
def test_projectbuilder_sanitize_zephyr_base_from_files(
    mocked_jobserver,
    text_mocks,
    expected_write_texts
):
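    """
    _sanitize_zephyr_base_from_files() is expected to remove the
    canonical Zephyr base path from the build files it knows about, so
    e.g. 'canonical/zephyr/base/dummy.file' is rewritten as 'dummy.file'.
    """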
    build_dir_path = 'canonical/zephyr/base/build_dir/'

    def mock_exists(fname):
        if not fname.startswith(build_dir_path):
            return False
        return fname[len(build_dir_path):] in text_mocks

    def mock_open(fname, *args, **kwargs):
        if not fname.startswith(build_dir_path):
            raise FileNotFoundError(errno.ENOENT, f'File {fname} not found.')
        return text_mocks[fname[len(build_dir_path):]]()

    instance_mock = mock.Mock()
    instance_mock.build_dir = build_dir_path
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    with mock.patch('os.path.exists', mock_exists), \
         mock.patch('builtins.open', mock_open), \
         mock.patch('twisterlib.runner.canonical_zephyr_base',
                    'canonical/zephyr/base'):
        pb._sanitize_zephyr_base_from_files()

    for fname, fhandler in text_mocks.items():
        fhandler().write.assert_called_with(expected_write_texts[fname])


TESTDATA_13 = [
    (
        'error', True, True, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                                ERROR dummy reason (cmake)'],
        None
    ),
    (
        'failed', False, False, False,
        ['ERROR     dummy platform' \
         '            dummy.testsuite.name' \
         '                                FAILED : dummy reason'],
        'INFO    - Total complete:   20/  25  80%  skipped:    3,' \
        ' failed:    3, error:    1'
    ),
    (
        'skipped', True, False, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               SKIPPED (dummy reason)'],
        None
    ),
    (
        'filtered', False, False, False,
        [],
        'INFO    - Total complete:   20/  25  80%  skipped:    4,' \
        ' failed:    2, error:    1'
    ),
    (
        'passed', True, False, True,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               PASSED' \
         ' (dummy handler type: dummy dut, 60.000s)'],
        None
    ),
    (
        'passed', True, False, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               PASSED (build)'],
        None
    ),
    (
        'unknown status', False, False, False,
        ['Unknown status = unknown status'],
        'INFO    - Total complete:   20/  25  80%  skipped:    3,' \
        ' failed:    2, error:    1\r'
    ),
    (
        'timeout', True, False, True,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               UNKNOWN' \
         ' (dummy handler type: dummy dut, 60.000s/seed: 123)'],
        None
    ),
]

@pytest.mark.parametrize(
    'status, verbose, cmake_only, ready_run, expected_logs, expected_out',
    TESTDATA_13,
    ids=['verbose error cmake only', 'failed', 'verbose skipped', 'filtered',
         'verbose passed ready run', 'verbose passed', 'unknown status',
         'timeout']
)
def test_projectbuilder_report_out(
    capfd,
    caplog,
    mocked_jobserver,
    status,
    verbose,
    cmake_only,
    ready_run,
    expected_logs,
    expected_out
):
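    """
    report_out() should log a one-line verdict for the finished instance
    and, for non-verbose runs, an in-place progress summary; the expected
    strings mirror the current twisterlib.runner log format.
    """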
    instance_mock = mock.Mock()
    instance_mock.handler.type_str = 'dummy handler type'
    instance_mock.handler.seed = 123
    instance_mock.handler.ready = ready_run
    instance_mock.run = ready_run
    instance_mock.dut = 'dummy dut'
    instance_mock.execution_time = 60
    instance_mock.platform.name = 'dummy platform'
    instance_mock.status = status
    instance_mock.reason = 'dummy reason'
    instance_mock.testsuite.name = 'dummy.testsuite.name'
    instance_mock.testsuite.testcases = [mock.Mock() for _ in range(25)]
    instance_mock.testcases = [mock.Mock() for _ in range(24)] + \
                              [mock.Mock(status='skipped')]
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options.verbose = verbose
    pb.options.cmake_only = cmake_only
    pb.options.seed = 123
    pb.log_info_file = mock.Mock()

    results_mock = mock.Mock()
    results_mock.iteration = 1
    results_mock.total = 25
    results_mock.done = 19
    results_mock.passed = 17
    results_mock.skipped_configs = 3
    results_mock.skipped_cases = 4
    results_mock.failed = 2
    results_mock.error = 1
    results_mock.cases = 0

    pb.report_out(results_mock)

    assert results_mock.cases == 25

    trim_actual_log = re.sub(
        r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])',
        '',
        caplog.text
    )
    trim_actual_log = re.sub(r'twister:runner.py:\d+', '', trim_actual_log)
    assert all([log in trim_actual_log for log in expected_logs])

    if expected_out:
        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        # Remove 7-bit ANSI C1 escape sequences (colours)
        out = re.sub(
            r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])',
            '',
            out
        )

        assert expected_out in out


def test_projectbuilder_cmake_assemble_args():
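    """
    cmake_assemble_args() should flatten the extra args, handler args and
    conf/overlay file lists into a single list of -D<...> CMake arguments.
    """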
    extra_args = ['CONFIG_FOO=y', 'DUMMY_EXTRA="yes"']
    handler = mock.Mock(ready=True, args=['dummy_handler'])
    extra_conf_files = ['extrafile1.conf', 'extrafile2.conf']
    extra_overlay_confs = ['extra_overlay_conf']
    extra_dtc_overlay_files = ['overlay1.dtc', 'overlay2.dtc']
    cmake_extra_args = ['CMAKE1="yes"', 'CMAKE2=n']
    build_dir = os.path.join('build', 'dir')

    with mock.patch('os.path.exists', return_value=True):
        results = ProjectBuilder.cmake_assemble_args(extra_args, handler,
                                                     extra_conf_files,
                                                     extra_overlay_confs,
                                                     extra_dtc_overlay_files,
                                                     cmake_extra_args,
                                                     build_dir)

    expected_results = [
        '-DCONFIG_FOO=y',
        '-DCMAKE1="yes"',
        '-DCMAKE2=n',
        '-DDUMMY_EXTRA=yes',
        '-Ddummy_handler',
        '-DCONF_FILE=extrafile1.conf;extrafile2.conf',
        '-DDTC_OVERLAY_FILE=overlay1.dtc;overlay2.dtc',
        f'-DOVERLAY_CONFIG=extra_overlay_conf ' \
        f'{os.path.join("build", "dir", "twister", "testsuite_extra.conf")}'
    ]

    assert results == expected_results


def test_projectbuilder_cmake(mocked_jobserver):
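    """
    cmake() should assemble CMake arguments from the testsuite and the
    command-line options, forward them to run_cmake() and return its
    result.
    """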
    instance_mock = mock.Mock()
    instance_mock.handler = 'dummy handler'
    instance_mock.build_dir = os.path.join('build', 'dir')
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.build_dir = 'build_dir'
    pb.testsuite.extra_args = ['some', 'args']
    pb.testsuite.extra_conf_files = ['some', 'files1']
    pb.testsuite.extra_overlay_confs = ['some', 'files2']
    pb.testsuite.extra_dtc_overlay_files = ['some', 'files3']
    pb.options.extra_args = ['other', 'args']
    pb.cmake_assemble_args = mock.Mock(return_value=['dummy'])
    cmake_res_mock = mock.Mock()
    pb.run_cmake = mock.Mock(return_value=cmake_res_mock)

    res = pb.cmake(['dummy filter'])

    assert res == cmake_res_mock
    pb.cmake_assemble_args.assert_called_once_with(
        pb.testsuite.extra_args,
        pb.instance.handler,
        pb.testsuite.extra_conf_files,
        pb.testsuite.extra_overlay_confs,
        pb.testsuite.extra_dtc_overlay_files,
        pb.options.extra_args,
        pb.instance.build_dir
    )
    pb.run_cmake.assert_called_once_with(['dummy'], ['dummy filter'])


def test_projectbuilder_build(mocked_jobserver):
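    """
    build() should call run_build() with the instance's build directory
    and pass its result through.
    """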
    instance_mock = mock.Mock()
    instance_mock.testsuite.harness = 'test'
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    pb.build_dir = 'build_dir'
    pb.run_build = mock.Mock(return_value={'dummy': 'dummy'})

    res = pb.build()

    pb.run_build.assert_called_once_with(['--build', 'build_dir'])
    assert res == {'dummy': 'dummy'}


TESTDATA_14 = [
    (
        True,
        'device',
        234,
        'native_sim',
        'posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_POSIX': 'y'},
        'pytest',
        True,
        True,
        True,
        True,
        True,
        False
    ),
    (
        True,
        'not device',
        None,
        'native_sim',
        'not posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_POSIX': 'y'},
        'not pytest',
        False,
        False,
        False,
        False,
        False,
        True
    ),
    (
        False,
        'device',
        234,
        'native_sim',
        'posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_POSIX': 'y'},
        'pytest',
        False,
        False,
        False,
        False,
        False,
        False
    ),
]

@pytest.mark.parametrize(
    'ready, type_str, seed, platform_name, platform_arch, defconfig, harness,' \
    ' expect_duts, expect_parse_generated, expect_seed,' \
    ' expect_extra_test_args, expect_pytest, expect_handle',
    TESTDATA_14,
    ids=['pytest full', 'not pytest minimal', 'not ready']
)
def test_projectbuilder_run(
    mocked_jobserver,
    ready,
    type_str,
    seed,
    platform_name,
    platform_arch,
    defconfig,
    harness,
    expect_duts,
    expect_parse_generated,
    expect_seed,
    expect_extra_test_args,
    expect_pytest,
    expect_handle
):
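    """
    run() should configure the instance's handler (DUTs, seed, extra test
    args), pick the right harness, and either call pytest_run() on a
    Pytest harness or hand the harness to the handler, but only if the
    handler is ready.
    """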
    pytest_mock = mock.Mock(spec=Pytest)
    harness_mock = mock.Mock()

    def mock_harness(name):
        if name == 'Pytest':
            return pytest_mock
        else:
            return harness_mock

    instance_mock = mock.Mock()
    instance_mock.handler.get_test_timeout = mock.Mock(return_value=60)
    instance_mock.handler.seed = 123
    instance_mock.handler.ready = ready
    instance_mock.handler.type_str = type_str
    instance_mock.handler.duts = [mock.Mock(name='dummy dut')]
    instance_mock.platform.name = platform_name
    instance_mock.platform.arch = platform_arch
    instance_mock.testsuite.harness = harness
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options.extra_test_args = ['dummy_arg1', 'dummy_arg2']
    pb.duts = ['another dut']
    pb.options.seed = seed
    pb.defconfig = defconfig
    pb.parse_generated = mock.Mock()

    with mock.patch('twisterlib.runner.HarnessImporter.get_harness',
                    mock_harness):
        pb.run()

    if expect_duts:
        assert pb.instance.handler.duts == ['another dut']

    if expect_parse_generated:
        pb.parse_generated.assert_called_once()

    if expect_seed:
        assert pb.instance.handler.seed == seed

    if expect_extra_test_args:
        assert pb.instance.handler.extra_test_args == ['dummy_arg1',
                                                       'dummy_arg2']

    if expect_pytest:
        pytest_mock.pytest_run.assert_called_once_with(60)

    if expect_handle:
        pb.instance.handler.handle.assert_called_once_with(harness_mock)


TESTDATA_15 = [
    (False, False, False, True),
    (True, False, True, False),
    (False, True, False, True),
    (True, True, False, True),
]

@pytest.mark.parametrize(
    'enable_size_report, cmake_only, expect_calc_size, expect_zeroes',
    TESTDATA_15,
    ids=['none', 'size_report', 'cmake', 'size_report+cmake']
)
def test_projectbuilder_gather_metrics(
    mocked_jobserver,
    enable_size_report,
    cmake_only,
    expect_calc_size,
    expect_zeroes
):
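    """
    gather_metrics() is expected to call calc_size() only when a size
    report was requested and the run is not cmake-only; otherwise all
    size metrics should be zeroed.
    """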
    instance_mock = mock.Mock()
    instance_mock.metrics = {}
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options.enable_size_report = enable_size_report
    pb.options.create_rom_ram_report = False
    pb.options.cmake_only = cmake_only
    pb.calc_size = mock.Mock()

    pb.gather_metrics(instance_mock)

    if expect_calc_size:
        pb.calc_size.assert_called_once()

    if expect_zeroes:
        assert instance_mock.metrics['used_ram'] == 0
        assert instance_mock.metrics['used_rom'] == 0
        assert instance_mock.metrics['available_rom'] == 0
        assert instance_mock.metrics['available_ram'] == 0
        assert instance_mock.metrics['unrecognized'] == []


TESTDATA_16 = [
    ('error', mock.ANY, False, False, False),
    ('failed', mock.ANY, False, False, False),
    ('skipped', mock.ANY, False, False, False),
    ('filtered', 'native', False, False, True),
    ('passed', 'qemu', False, False, True),
    ('filtered', 'unit', False, False, True),
    ('filtered', 'mcu', True, True, False),
    ('passed', 'frdm_k64f', False, True, False),
]

@pytest.mark.parametrize(
    'status, platform_type, expect_warnings, expect_calcs, expect_zeroes',
    TESTDATA_16,
    ids=[x[0] + ((', ' + x[1]) if x[1] is not mock.ANY else '')
         for x in TESTDATA_16]
)
def test_projectbuilder_calc_size(
    status,
    platform_type,
    expect_warnings,
    expect_calcs,
    expect_zeroes
):
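    """
    calc_size() should compute RAM/ROM metrics for real hardware platform
    types, zero them for simulated ones ('native', 'qemu', 'unit'), and
    leave the metrics untouched for instances that did not build.
    """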
    size_calc_mock = mock.Mock()

    instance_mock = mock.Mock()
    instance_mock.status = status
    instance_mock.platform.type = platform_type
    instance_mock.metrics = {}
    instance_mock.calculate_sizes = mock.Mock(return_value=size_calc_mock)

    from_buildlog = True

    ProjectBuilder.calc_size(instance_mock, from_buildlog)

    if expect_calcs:
        instance_mock.calculate_sizes.assert_called_once_with(
            from_buildlog=from_buildlog,
            generate_warning=expect_warnings
        )

        assert instance_mock.metrics['used_ram'] == \
               size_calc_mock.get_used_ram()
        assert instance_mock.metrics['used_rom'] == \
               size_calc_mock.get_used_rom()
        assert instance_mock.metrics['available_rom'] == \
               size_calc_mock.get_available_rom()
        assert instance_mock.metrics['available_ram'] == \
               size_calc_mock.get_available_ram()
        assert instance_mock.metrics['unrecognized'] == \
               size_calc_mock.unrecognized_sections()

    if expect_zeroes:
        assert instance_mock.metrics['used_ram'] == 0
        assert instance_mock.metrics['used_rom'] == 0
        assert instance_mock.metrics['available_rom'] == 0
        assert instance_mock.metrics['available_ram'] == 0
        assert instance_mock.metrics['unrecognized'] == []

    if expect_calcs or expect_zeroes:
        assert instance_mock.metrics['handler_time'] == \
               instance_mock.execution_time
    else:
        assert instance_mock.metrics == {}


TESTDATA_17 = [
    ('linux', 'posix', {'jobs': 4}, True, 32, 'GNUMakeJobClient'),
    ('linux', 'posix', {'build_only': True}, False, 16, 'GNUMakeJobServer'),
    ('linux', '???', {}, False, 8, 'JobClient'),
    ('linux', '???', {'jobs': 4}, False, 4, 'JobClient'),
]

@pytest.mark.parametrize(
    'platform, os_name, options, jobclient_from_environ, expected_jobs,' \
    ' expected_jobserver',
    TESTDATA_17,
    ids=['GNUMakeJobClient', 'GNUMakeJobServer',
         'JobClient', 'JobClient+options']
)
def test_twisterrunner_run(
    caplog,
    platform,
    os_name,
    options,
    jobclient_from_environ,
    expected_jobs,
    expected_jobserver
):
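    """
    run() should select the jobserver implementation and job count based
    on the environment and the command-line options, then execute the
    pipeline and merge the metrics gathered from the done queue back into
    the instances.
    """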
    def mock_client_from_environ(jobs):
        if jobclient_from_environ:
            jobclient_mock = mock.Mock(jobs=32)
            jobclient_mock.name = 'GNUMakeJobClient'
            return jobclient_mock
        return None

    instances = {'dummy instance': mock.Mock(metrics={'k': 'v'})}
    suites = [mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.options.retry_failed = 2
    tr.options.retry_interval = 10
    tr.options.retry_build_errors = True
    tr.options.jobs = None
    tr.options.build_only = None
    for k, v in options.items():
        setattr(tr.options, k, v)
    tr.update_counting_before_pipeline = mock.Mock()
    tr.execute = mock.Mock()
    tr.show_brief = mock.Mock()

    gnumakejobserver_mock = mock.Mock()
    gnumakejobserver_mock().name = 'GNUMakeJobServer'
    jobclient_mock = mock.Mock()
    jobclient_mock().name = 'JobClient'

    pipeline_q = queue.LifoQueue()
    done_q = queue.LifoQueue()
    done_instance = mock.Mock(
        metrics={'k2': 'v2'},
        execution_time=30
    )
    done_instance.name = 'dummy instance'
    done_q.put(done_instance)
    manager_mock = mock.Mock()
    manager_mock().LifoQueue = mock.Mock(
        side_effect=iter([pipeline_q, done_q])
    )

    results_mock = mock.Mock()
    results_mock().error = 1
    results_mock().iteration = 0
    results_mock().failed = 2
    results_mock().total = 9

    with mock.patch('twisterlib.runner.ExecutionCounter', results_mock), \
         mock.patch('twisterlib.runner.BaseManager', manager_mock), \
         mock.patch('twisterlib.runner.GNUMakeJobClient.from_environ',
                    mock_client_from_environ), \
         mock.patch('twisterlib.runner.GNUMakeJobServer',
                    gnumakejobserver_mock), \
         mock.patch('twisterlib.runner.JobClient', jobclient_mock), \
         mock.patch('multiprocessing.cpu_count', return_value=8), \
         mock.patch('sys.platform', platform), \
         mock.patch('time.sleep', mock.Mock()), \
         mock.patch('os.name', os_name):
        tr.run()

    assert f'JOBS: {expected_jobs}' in caplog.text

    assert tr.jobserver.name == expected_jobserver

    assert tr.instances['dummy instance'].metrics == {
        'k': 'v',
        'k2': 'v2',
        'handler_time': 30,
        'unrecognized': []
    }

    assert results_mock().error == 0


def test_twisterrunner_update_counting_before_pipeline():
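    """
    update_counting_before_pipeline() is expected to count statically
    filtered and errored instances (and their test cases) before any
    tasks are queued; runtime-filtered instances are left for later.
    """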
    instances = {
        'dummy1': mock.Mock(
            status='filtered',
            reason='runtime filter',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy2': mock.Mock(
            status='filtered',
            reason='static filter',
            testsuite=mock.Mock(
                testcases=[mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock()]
            )
        ),
        'dummy3': mock.Mock(
            status='error',
            reason='error',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy4': mock.Mock(
            status='passed',
            reason='OK',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy5': mock.Mock(
            status='skipped',
            reason=None,
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        )
    }
    suites = [mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.results = mock.Mock(
        skipped_filter=0,
        skipped_configs=0,
        skipped_cases=0,
        cases=0,
        error=0
    )

    tr.update_counting_before_pipeline()

    assert tr.results.skipped_filter == 1
    assert tr.results.skipped_configs == 1
    assert tr.results.skipped_cases == 4
    assert tr.results.cases == 4
    assert tr.results.error == 1


def test_twisterrunner_show_brief(caplog):
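    """
    show_brief() should log a one-line summary of the selected test
    scenarios, test instances and skipped configurations.
    """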
    instances = {
        'dummy1': mock.Mock(),
        'dummy2': mock.Mock(),
        'dummy3': mock.Mock(),
        'dummy4': mock.Mock(),
        'dummy5': mock.Mock()
    }
    suites = [mock.Mock(), mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.results = mock.Mock(
        skipped_filter=3,
        skipped_configs=4,
        skipped_cases=0,
        cases=0,
        error=0
    )

    tr.show_brief()

    log = '2 test scenarios (5 test instances) selected,' \
          ' 4 configurations skipped (3 by static filter, 1 at runtime).'

    assert log in caplog.text


TESTDATA_18 = [
    (False, False, False, [{'op': 'cmake', 'test': mock.ANY}]),
    (False, False, True, [{'op': 'filter', 'test': mock.ANY},
                          {'op': 'cmake', 'test': mock.ANY}]),
    (False, True, True, [{'op': 'run', 'test': mock.ANY},
                         {'op': 'run', 'test': mock.ANY}]),
    (False, True, False, [{'op': 'run', 'test': mock.ANY}]),
    (True, True, False, [{'op': 'cmake', 'test': mock.ANY}]),
    (True, True, True, [{'op': 'filter', 'test': mock.ANY},
                        {'op': 'cmake', 'test': mock.ANY}]),
    (True, False, True, [{'op': 'filter', 'test': mock.ANY},
                         {'op': 'cmake', 'test': mock.ANY}]),
    (True, False, False, [{'op': 'cmake', 'test': mock.ANY}]),
]

@pytest.mark.parametrize(
    'build_only, test_only, retry_build_errors, expected_pipeline_elements',
    TESTDATA_18,
    ids=['none', 'retry', 'test+retry', 'test', 'build+test',
         'build+test+retry', 'build+retry', 'build']
)
def test_twisterrunner_add_tasks_to_queue(
    build_only,
    test_only,
    retry_build_errors,
    expected_pipeline_elements
):
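    """
    add_tasks_to_queue() should requeue failed instances (and errored
    ones when build errors are retried) as 'cmake' or 'filter' operations
    depending on the required CMake filter stages, or as 'run' operations
    in test-only mode.
    """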
    def mock_get_cmake_filter_stages(filter, keys):
        return [filter]

    instances = {
        'dummy1': mock.Mock(run=True, retries=0, status='passed', build_dir="/tmp"),
        'dummy2': mock.Mock(run=True, retries=0, status='skipped', build_dir="/tmp"),
        'dummy3': mock.Mock(run=True, retries=0, status='filtered', build_dir="/tmp"),
        'dummy4': mock.Mock(run=True, retries=0, status='error', build_dir="/tmp"),
        'dummy5': mock.Mock(run=True, retries=0, status='failed', build_dir="/tmp")
    }
    instances['dummy4'].testsuite.filter = 'some'
    instances['dummy5'].testsuite.filter = 'full'
    suites = [mock.Mock(), mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.get_cmake_filter_stages = mock.Mock(
        side_effect=mock_get_cmake_filter_stages
    )

    pipeline_mock = mock.Mock()

    tr.add_tasks_to_queue(
        pipeline_mock,
        build_only,
        test_only,
        retry_build_errors
    )

    assert all(
        [build_only != instance.run for instance in instances.values()]
    )

    tr.get_cmake_filter_stages.assert_any_call('full', mock.ANY)
    if retry_build_errors:
        tr.get_cmake_filter_stages.assert_any_call('some', mock.ANY)

    assert pipeline_mock.put.call_args_list == \
           [mock.call(el) for el in expected_pipeline_elements]


TESTDATA_19 = [
    'linux',
    'nt'
]

@pytest.mark.parametrize(
    'platform',
    TESTDATA_19,
)
def test_twisterrunner_pipeline_mgr(mocked_jobserver, platform):
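    """
    pipeline_mgr() should keep pulling tasks from the pipeline and
    processing them until the queue is empty; on Linux each manager is
    expected to acquire a job slot from the jobserver first.
    """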
    counter = 0

    def mock_get_nowait():
        nonlocal counter
        counter += 1
        if counter > 5:
            raise queue.Empty()
        return {'test': 'dummy'}

    instances = {}
    suites = []
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.jobserver = mock.Mock(
        get_job=mock.Mock(
            return_value=nullcontext()
        )
    )

    pipeline_mock = mock.Mock()
    pipeline_mock.get_nowait = mock.Mock(side_effect=mock_get_nowait)
    done_queue_mock = mock.Mock()
    lock_mock = mock.Mock()
    results_mock = mock.Mock()

    with mock.patch('sys.platform', platform), \
         mock.patch('twisterlib.runner.ProjectBuilder',
                    return_value=mock.Mock()) as pb:
        tr.pipeline_mgr(pipeline_mock, done_queue_mock, lock_mock, results_mock)

    assert len(pb().process.call_args_list) == 5

    if platform == 'linux':
        tr.jobserver.get_job.assert_called_once()


def test_twisterrunner_execute(caplog):
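    """
    execute() should start one worker process per job slot and make sure
    every worker is terminated when the run is interrupted.
    """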
    counter = 0

    def mock_join():
        nonlocal counter
        counter += 1
        if counter > 3:
            raise KeyboardInterrupt()

    instances = {}
    suites = []
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.add_tasks_to_queue = mock.Mock()
    tr.jobs = 5

    process_mock = mock.Mock()
    process_mock().join = mock.Mock(side_effect=mock_join)
    pipeline_mock = mock.Mock()
    done_mock = mock.Mock()

    with mock.patch('twisterlib.runner.Process', process_mock):
        tr.execute(pipeline_mock, done_mock)

    assert 'Execution interrupted' in caplog.text

    assert len(process_mock().start.call_args_list) == 5
    assert len(process_mock().join.call_args_list) == 4
    assert len(process_mock().terminate.call_args_list) == 5


TESTDATA_20 = [
    ('', []),
    ('not ARCH in ["x86", "arc"]', ['full']),
    ('dt_dummy(x, y)', ['dts']),
    ('not CONFIG_FOO', ['kconfig']),
    ('dt_dummy and CONFIG_FOO', ['dts', 'kconfig']),
]

@pytest.mark.parametrize(
    'filter, expected_result',
    TESTDATA_20,
    ids=['none', 'full', 'dts', 'kconfig', 'dts+kconfig']
)
def test_twisterrunner_get_cmake_filter_stages(filter, expected_result):
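    """
    get_cmake_filter_stages() is expected to map the symbols used in a
    filter expression to the CMake stages needed to evaluate it, e.g. a
    CONFIG_* reference requires the 'kconfig' stage and a dt_* function
    requires the 'dts' stage.
    """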
    result = TwisterRunner.get_cmake_filter_stages(filter, ['not', 'and'])

    assert sorted(result) == sorted(expected_result)
