1#!/usr/bin/env python3
2
3# Copyright(c) 2023 Google LLC
4# SPDX-License-Identifier: Apache-2.0
5
6"""
7This test file contains testsuites for the Harness classes of twister
8"""
9import mock
10import sys
11import os
12import pytest
13import re
14import logging as logger
15
16# ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
17from conftest import ZEPHYR_BASE
18
19sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))
20
21from twisterlib.harness import (
22    Bsim,
23    Console,
24    Gtest,
25    Harness,
26    HarnessImporter,
27    Pytest,
28    PytestHarnessException,
29    Robot,
30    Test,
31)
32from twisterlib.statuses import TwisterStatus
33from twisterlib.testsuite import TestSuite
34from twisterlib.testinstance import TestInstance
35
# State markers exactly as they appear inside the bracketed gtest banner
# (e.g. "[ RUN      ]", "[       OK ]") — padding is significant.
GTEST_START_STATE = " RUN      "
GTEST_PASS_STATE = "       OK "
GTEST_SKIP_STATE = " DISABLED "
GTEST_FAIL_STATE = "  FAILED  "
# Canonical console lines as the Gtest harness receives them, wrapped in the
# Zephyr logging prefix "[timestamp] <inf> label:".
SAMPLE_GTEST_START = (
    "[00:00:00.000,000] <inf> label:  [==========] Running all tests."
)
# Template for a per-test-case line; fill via .format(state=..., suite=..., test=...).
SAMPLE_GTEST_FMT = (
    "[00:00:00.000,000] <inf> label:  [{state}] {suite}.{test} (0ms)"
)
SAMPLE_GTEST_FMT_FAIL_WITH_PARAM = (
    "[00:00:00.000,000] <inf> label:  "
    + "[{state}] {suite}.{test}, where GetParam() = 8-byte object <0B-00 00-00 00-9A 80-F7> (0 ms total)"
)
SAMPLE_GTEST_END = (
    "[00:00:00.000,000] <inf> label:  [==========] Done running all tests."
)
# Alternative end-of-run banner some gtest versions emit instead of SAMPLE_GTEST_END.
SAMPLE_GTEST_END_VARIANT = (
    "[00:00:00.000,000] <inf> label:  [----------] Global test environment tear-down"
)
56
57
def process_logs(harness, logs):
    """Feed every log line to the harness, one handle() call per line."""
    for log_line in logs:
        harness.handle(log_line)
61
62
# Cases for test_harness_parse_record. Each tuple is:
#   (input log lines,
#    regex patterns with named groups,
#    expected harness.recording contents,
#    record["as_json"] field list (None = option not set),
#    record_merge flag (None/True)).
TEST_DATA_RECORDING = [
    ([""], ["^START:(?P<foo>.*):END"], [], None, None),
    (["START:bar:STOP"], ["^START:(?P<foo>.*):END"], [], None, None),
    (["START:bar:END"], ["^START:(?P<foo>.*):END"], [{"foo": "bar"}], None, None),
    (
        ["START:bar:baz:END"],
        ["^START:(?P<foo>.*):(?P<boo>.*):END"],
        [{"foo": "bar", "boo": "baz"}],
        None,
        None,
    ),
    (
        ["START:bar:END"],
        ["^(START:(?P<foo>[a-z]+):END)|(START:(?P<boo>[0-9]+):END)"],
        [{"foo": "bar", "boo": ""}],
        None,
        None,
    ),
    (
        ["START:bar:baz:END"],
        ["^START:(?P<foo>.*):baz:END", "^START:bar:(?P<boo>.*):END"],
        [{"foo": "bar"}, {"boo": "baz"}],
        None,
        None,
    ),
    (
        ["START:bar:END", "START:123:END"],
        ["^START:(?P<foo>[a-z]+):END", "^START:(?P<boo>[0-9]+):END"],
        [{"foo": "bar"}, {"boo": "123"}],
        None,
        None,
    ),
    (
        ["START:bar:END", "START:123:END"],
        ["^START:(?P<foo>[a-z]+):END", "^START:(?P<foo>[0-9]+):END"],
        [{"foo": "bar"}, {"foo": "123"}],
        None,
        None,
    ),
    (
        ["START:bar:END", "START:123:END"],
        ["^START:(?P<foo>[a-z]+):END", "^START:(?P<foo>[0-9]+):END"],
        [{"foo": ["bar", "123"]}],
        None,
        True,
    ),
    (
        ["START:bar:baz:END"],
        ["^START:(?P<foo>.*):baz:END", "^START:bar:(?P<boo>.*):END"],
        [{"foo": "bar", "boo": "baz"}],
        None,
        True,
    ),
    (
        ["START:bar:baz:END"],
        ["^START:(?P<foo>.*):baz:END", "^START:bar:(?P<foo>.*):END"],
        [{"foo": ["bar", "baz"]}],
        None,
        True,
    ),
    (
        ["START:bar:baz:END", "START:may:jun:END"],
        ["^START:(?P<foo>.*):(?P<boo>.*):END"],
        [{"foo": "bar", "boo": "baz"}, {"foo": "may", "boo": "jun"}],
        None,
        None,
    ),
    (["START:bar:END"], ["^START:(?P<foo>.*):END"], [{"foo": "bar"}], [], None),
    (["START:bar:END"], ["^START:(?P<foo>.*):END"], [{"foo": "bar"}], ["boo"], None),
    (
        ["START:bad_json:END"],
        ["^START:(?P<foo>.*):END"],
        [
            {
                "foo": {
                    "ERROR": {
                        "msg": "Expecting value: line 1 column 1 (char 0)",
                        "doc": "bad_json",
                    }
                }
            }
        ],
        ["foo"],
        None,
    ),
    (["START::END"], ["^START:(?P<foo>.*):END"], [{"foo": {}}], ["foo"], None),
    (
        ['START: {"one":1, "two":2} :END'],
        ["^START:(?P<foo>.*):END"],
        [{"foo": {"one": 1, "two": 2}}],
        ["foo"],
        None,
    ),
    (
        ['START: {"one":1, "two":2} :STOP:oops:END'],
        ["^START:(?P<foo>.*):STOP:(?P<boo>.*):END"],
        [{"foo": {"one": 1, "two": 2}, "boo": "oops"}],
        ["foo"],
        None,
    ),
    (
        ['START: {"one":1, "two":2} :STOP:{"oops":0}:END'],
        ["^START:(?P<foo>.*):STOP:(?P<boo>.*):END"],
        [{"foo": {"one": 1, "two": 2}, "boo": {"oops": 0}}],
        ["foo", "boo"],
        None,
    ),
    (
        ['START: {"one":1, "two":2} :STOP:{"oops":0}:END'],
        ["^START:(?P<foo>.*):STOP:.*:END",
         "^START:.*:STOP:(?P<boo>.*):END"
        ],
        [{"foo": {"one": 1, "two": 2}}, {"boo": {"oops": 0}}],
        ["foo", "boo"],
        None,
    ),
    (
        ['START: {"one":1, "two":2} :STOP:{"oops":0}:END'],
        ["^START:(?P<foo>.*):STOP:.*:END",
         "^START:.*:STOP:(?P<foo>.*):END"
        ],
        [{"foo": [{"one": 1, "two": 2}, {"oops": 0}]}],
        ["foo"],
        True,
    ),
]
189
190
@pytest.mark.parametrize(
    "lines, patterns, expected_records, as_json, merge",
    TEST_DATA_RECORDING,
    ids=[
        "empty",
        "no match",
        "match 1 field",
        "match 2 fields",
        "2 or-ed groups one miss",
        "one line, two patters, match 2 fields -> 2 records",
        "two lines, two patters -> 2 records",
        "two lines, two patters same field -> 2 same records",
        "two lines, two patters same field merge -> 1 records 2 values",
        "one line, two patters, match 2 fields, merge -> 1 record",
        "one line, two patters, match 1 field, merge -> 1 record list",
        "match 2 records",
        "as_json empty",
        "as_json no such field",
        "error parsing json",
        "empty json value",
        "simple json",
        "plain field and json field",
        "two json fields",
        "two json fields in two patterns -> 2 records",
        "two json fields in two patterns merge -> 1 records 2 items",
    ],
)
def test_harness_parse_record(lines, patterns, expected_records, as_json, merge):
    """Harness.parse_record() collects named-group matches from log lines into
    harness.recording, optionally JSON-decoding selected fields and/or merging
    records that share field names."""
    harness = Harness()
    harness.record = {"regex": patterns}
    harness.record_patterns = [re.compile(p) for p in patterns]

    harness.record_merge = merge
    harness.record_as_json = as_json
    if as_json is not None:
        harness.record["as_json"] = as_json

    # Recording must start empty so everything collected below is ours.
    assert not harness.recording

    for line in lines:
        harness.parse_record(line)

    assert harness.recording == expected_records
234
235
# Cases for test_harness_process_test. Each tuple is:
#   (console line, fault flag going in, fail_on_fault flag,
#    expected capture_coverage, expected status, expected matched_run_id).
TEST_DATA_1 = [
    ("RunID: 12345", False, False, False, TwisterStatus.NONE, True),
    ("PROJECT EXECUTION SUCCESSFUL", False, False, False, TwisterStatus.PASS, False),
    ("PROJECT EXECUTION SUCCESSFUL", True, False, False, TwisterStatus.FAIL, False),
    ("PROJECT EXECUTION FAILED", False, False, False, TwisterStatus.FAIL, False),
    ("ZEPHYR FATAL ERROR", False, True, False, TwisterStatus.NONE, False),
    ("GCOV_COVERAGE_DUMP_START", None, None, True, TwisterStatus.NONE, False),
    ("GCOV_COVERAGE_DUMP_END", None, None, False, TwisterStatus.NONE, False),
]
245
246
@pytest.mark.parametrize(
    "line, fault, fail_on_fault, cap_cov, exp_stat, exp_id",
    TEST_DATA_1,
    ids=[
        "match id",
        "passed passed",
        "passed failed",
        "failed failed",
        "fail on fault",
        "GCOV START",
        "GCOV END",
    ],
)
def test_harness_process_test(line, fault, fail_on_fault, cap_cov, exp_stat, exp_id):
    """Harness.process_test() derives run-id matching, status and coverage
    capture flags from a single console line."""
    # Arrange
    harness = Harness()
    harness.run_id = 12345
    harness.status = TwisterStatus.NONE
    harness.fault = fault
    harness.fail_on_fault = fail_on_fault

    # Act
    # Keep parse_record out of the picture so only process_test is exercised.
    # The original created this patcher without starting it, which was a no-op.
    with mock.patch.object(Harness, "parse_record", return_value=None):
        harness.process_test(line)

    # Assert
    assert harness.matched_run_id == exp_id
    assert harness.status == exp_stat
    assert harness.capture_coverage == cap_cov
    assert harness.recording == []
277
278
def test_robot_configure(tmp_path):
    """Robot.configure() stores the instance and the robot harness_config values."""
    # Arrange
    platform = mock.Mock()
    platform.name = "mock_platform"
    platform.normalized_name = "mock_platform"

    testsuite = mock.Mock(id="id", testcases=[])
    testsuite.name = "mock_testsuite"
    testsuite.harness_config = {}

    out_dir = tmp_path / "gtest_out"
    out_dir.mkdir()

    instance = TestInstance(
        testsuite=testsuite, platform=platform, toolchain='zephyr',  outdir=out_dir
    )
    instance.testsuite.harness_config = {
        "robot_testsuite": "/path/to/robot/test",
        "robot_option": "test_option",
    }

    # Act
    harness = Robot()
    harness.configure(instance)

    # Assert
    assert harness.instance == instance
    assert harness.path == "/path/to/robot/test"
    assert harness.option == "test_option"
308
309
def test_robot_handle(tmp_path):
    """Robot.handle() marks both the instance and its test case as PASS."""
    # Arrange
    platform = mock.Mock()
    platform.name = "mock_platform"
    platform.normalized_name = "mock_platform"

    testsuite = mock.Mock(id="id", testcases=[])
    testsuite.name = "mock_testsuite"
    testsuite.harness_config = {}

    out_dir = tmp_path / "gtest_out"
    out_dir.mkdir()

    instance = TestInstance(
        testsuite=testsuite, platform=platform, toolchain='zephyr', outdir=out_dir
    )

    robot_harness = Robot()
    robot_harness.instance = instance
    robot_harness.id = "test_case_1"

    # Act
    robot_harness.handle("Test case passed")
    test_case = instance.get_case_or_create("test_case_1")

    # Assert
    assert instance.status == TwisterStatus.PASS
    assert test_case.status == TwisterStatus.PASS
340
341
# Cases for test_robot_run_robot_test:
#   (expected log fragment, subprocess return code, expected instance status).
TEST_DATA_2 = [
    ("", 0, TwisterStatus.PASS),
    ("Robot test failure: sourcedir for mock_platform", 1, TwisterStatus.FAIL),
]
346
347
@pytest.mark.parametrize(
    "exp_out, returncode, expected_status", TEST_DATA_2, ids=["passed", "failed"]
)
def test_robot_run_robot_test(tmp_path, caplog, exp_out, returncode, expected_status):
    """Robot.run_robot_test() writes the subprocess output to the log file and
    maps the subprocess return code onto the instance status."""
    # Arrange
    command = ["command"]

    handler = mock.Mock()
    handler.sourcedir = "sourcedir"
    handler.log = "handler.log"

    path = "path"
    option = "option"

    mock_platform = mock.Mock()
    mock_platform.name = "mock_platform"
    mock_platform.normalized_name = "mock_platform"

    mock_testsuite = mock.Mock(id="id", testcases=[mock.Mock()])
    mock_testsuite.name = "mock_testsuite"
    mock_testsuite.harness_config = {}

    outdir = tmp_path / "gtest_out"
    outdir.mkdir()

    instance = TestInstance(
        testsuite=mock_testsuite, platform=mock_platform, toolchain='zephyr', outdir=outdir
    )
    instance.build_dir = "build_dir"

    open_mock = mock.mock_open()

    robot = Robot()
    robot.path = path
    robot.option = option
    robot.instance = instance
    proc_mock = mock.Mock(
        returncode=returncode, communicate=mock.Mock(return_value=(b"output", None))
    )
    popen_mock = mock.Mock(
        return_value=mock.Mock(
            __enter__=mock.Mock(return_value=proc_mock), __exit__=mock.Mock()
        )
    )

    # Act
    # Fix: the original used "as mock.mock_popen", which assigned the patched
    # Popen as an attribute on the shared `mock` module and never used it;
    # no alias is needed at all.
    with mock.patch("subprocess.Popen", popen_mock), mock.patch(
        "builtins.open", open_mock
    ):
        robot.run_robot_test(command, handler)

    # Assert
    assert instance.status == expected_status
    open_mock().write.assert_called_once_with("output")
    assert exp_out in caplog.text
403
404
# Cases for test_console_configure:
#   (harness_config "type", expected number of compiled patterns;
#    None = single-pattern mode is expected instead of a pattern list).
TEST_DATA_3 = [
    ("one_line", None),
    ("multi_line", 2),
]
409
410
@pytest.mark.parametrize(
    "type, num_patterns", TEST_DATA_3, ids=["one line", "multi line"]
)
def test_console_configure(tmp_path, type, num_patterns):
    """Console.configure() compiles either one pattern or a pattern list,
    depending on the harness_config type."""
    platform = mock.Mock()
    platform.name = "mock_platform"
    platform.normalized_name = "mock_platform"

    testsuite = mock.Mock(id="id", testcases=[])
    testsuite.name = "mock_testsuite"
    testsuite.harness_config = {}

    out_dir = tmp_path / "gtest_out"
    out_dir.mkdir()

    instance = TestInstance(
        testsuite=testsuite, platform=platform, toolchain='zephyr', outdir=out_dir
    )
    instance.testsuite.harness_config = {
        "type": type,
        "regex": ["pattern1", "pattern2"],
    }

    console = Console()
    console.configure(instance)

    if num_patterns == 2:
        # Multi-line mode: all regexes are compiled, in order.
        compiled = [p.pattern for p in console.patterns]
        assert compiled == ["pattern1", "pattern2"]
    else:
        # One-line mode: only the first regex is compiled.
        assert console.pattern.pattern == "pattern1"
448
449
# Cases for test_console_handle:
#   (console type, ordered flag, expected status, input line,
#    expected fault flag, expected capture_coverage flag).
TEST_DATA_4 = [
    ("one_line", True, TwisterStatus.PASS, "line", False, False),
    ("multi_line", True, TwisterStatus.PASS, "line", False, False),
    ("multi_line", False, TwisterStatus.PASS, "line", False, False),
    ("invalid_type", False, TwisterStatus.NONE, "line", False, False),
    ("invalid_type", False, TwisterStatus.NONE, "ERROR", True, False),
    ("invalid_type", False, TwisterStatus.NONE, "COVERAGE_START", False, True),
    ("invalid_type", False, TwisterStatus.NONE, "COVERAGE_END", False, False),
]
459
460
@pytest.mark.parametrize(
    "line_type, ordered_val, exp_state, line, exp_fault, exp_capture",
    TEST_DATA_4,
    ids=[
        "one line",
        "multi line ordered",
        "multi line not ordered",
        "logger error",
        "fail on fault",
        "GCOV START",
        "GCOV END",
    ],
)
def test_console_handle(
    tmp_path, line_type, ordered_val, exp_state, line, exp_fault, exp_capture
):
    """Console.handle() tracks pattern matches, fault markers and coverage
    start/end markers across successive console lines."""
    mock_platform = mock.Mock()
    mock_platform.name = "mock_platform"
    mock_platform.normalized_name = "mock_platform"

    mock_testsuite = mock.Mock(id="id", testcases=[])
    mock_testsuite.name = "mock_testsuite"
    mock_testsuite.harness_config = {}

    outdir = tmp_path / "gtest_out"
    outdir.mkdir()

    instance = TestInstance(
        testsuite=mock_testsuite, platform=mock_platform, toolchain='zephyr', outdir=outdir
    )

    console = Console()
    console.instance = instance
    console.type = line_type
    console.patterns = [re.compile("pattern1"), re.compile("pattern2")]
    console.pattern = re.compile("pattern")
    console.patterns_expected = 0
    console.status = TwisterStatus.NONE
    console.fail_on_fault = True
    console.FAULT = "ERROR"
    console.GCOV_START = "COVERAGE_START"
    console.GCOV_END = "COVERAGE_END"
    console.record = {"regex": "RESULT: (.*)"}
    console.fieldnames = []
    console.recording = []
    console.regex = ["regex1", "regex2"]
    console.id = "test_case_1"

    instance.get_case_or_create("test_case_1")
    instance.testsuite.id = "test_suite_1"

    console.next_pattern = 0
    console.ordered = ordered_val
    # (removed a no-op "line = line" self-assignment from the original)
    console.handle(line)

    console.handle("pattern1")
    console.handle("pattern2")
    assert console.status == exp_state
    with pytest.raises(Exception):
        console.handle(line)
        # NOTE(review): unreachable — handle() raises on the line above, so
        # control never gets here; also logging.error is a plain function and
        # has no ".called" attribute. Left in place pending a proper rework.
        assert logger.error.called
    assert console.fault == exp_fault
    assert console.capture_coverage == exp_capture
527
528
# (serial_pty value, hardware availability: 0 = get_hardware returns a device, 1 = returns None)
TEST_DATA_5 = [("serial_pty", 0), (None, 0), (None, 1)]
530
531
@pytest.mark.parametrize(
    "pty_value, hardware_value",
    TEST_DATA_5,
    ids=["hardware pty", "hardware", "non hardware"],
)
def test_pytest__generate_parameters_for_hardware(tmp_path, pty_value, hardware_value):
    """Pytest._generate_parameters_for_hardware() builds the pytest CLI options
    from the hardware map entry, and raises PytestHarnessException when the
    handler has no hardware available."""
    # Arrange
    mock_platform = mock.Mock()
    mock_platform.name = "mock_platform"
    mock_platform.normalized_name = "mock_platform"

    mock_testsuite = mock.Mock(id="id", testcases=[])
    mock_testsuite.name = "mock_testsuite"
    mock_testsuite.harness_config = {}

    outdir = tmp_path / "gtest_out"
    outdir.mkdir()

    instance = TestInstance(
        testsuite=mock_testsuite, platform=mock_platform, toolchain='zephyr', outdir=outdir
    )

    handler = mock.Mock()
    handler.instance = instance

    # Fully-populated hardware map entry (device, runner, scripts, fixtures).
    hardware = mock.Mock()
    hardware.serial_pty = pty_value
    hardware.serial = "serial"
    hardware.baud = 115200
    hardware.runner = "runner"
    hardware.runner_params = ["--runner-param1", "runner-param2"]
    hardware.fixtures = ["fixture1:option1", "fixture2"]

    options = handler.options
    options.west_flash = "args"

    hardware.probe_id = "123"
    hardware.product = "product"
    hardware.pre_script = "pre_script"
    hardware.post_flash_script = "post_flash_script"
    hardware.post_script = "post_script"

    pytest_test = Pytest()
    pytest_test.configure(instance)

    # Act: command is only generated in the hardware-available case.
    if hardware_value == 0:
        handler.get_hardware.return_value = hardware
        command = pytest_test._generate_parameters_for_hardware(handler)
    else:
        handler.get_hardware.return_value = None

    # Assert
    if hardware_value == 1:
        with pytest.raises(PytestHarnessException) as exinfo:
            pytest_test._generate_parameters_for_hardware(handler)
        assert str(exinfo.value) == "Hardware is not available"
    else:
        assert "--device-type=hardware" in command
        # A pty device is passed via --device-serial-pty; a plain serial
        # device via --device-serial/--device-serial-baud.
        if pty_value == "serial_pty":
            assert "--device-serial-pty=serial_pty" in command
        else:
            assert "--device-serial=serial" in command
            assert "--device-serial-baud=115200" in command
        assert "--runner=runner" in command
        assert "--runner-params=--runner-param1" in command
        assert "--runner-params=runner-param2" in command
        assert "--west-flash-extra-args=args" in command
        assert "--device-id=123" in command
        assert "--device-product=product" in command
        assert "--pre-script=pre_script" in command
        assert "--post-flash-script=post_flash_script" in command
        assert "--post-script=post_script" in command
        assert "--twister-fixture=fixture1:option1" in command
        assert "--twister-fixture=fixture2" in command
607
608
def test__update_command_with_env_dependencies():
    """_update_command_with_env_dependencies() appends the twister_harness
    plugin option when the pytest plugin is not installed."""
    cmd = ["cmd"]
    pytest_test = Pytest()

    # Act
    # Force the "plugin not installed" path. The original created this patcher
    # without starting it, so the real PYTEST_PLUGIN_INSTALLED value leaked in.
    with mock.patch.object(Pytest, "PYTEST_PLUGIN_INSTALLED", False):
        result_cmd, _ = pytest_test._update_command_with_env_dependencies(cmd)

    # Assert
    assert result_cmd == ["cmd", "-p", "twister_harness.plugin"]
619
620
def test_pytest_run(tmp_path, caplog):
    """pytest_run() fails the instance when the handler type is unsupported."""
    # Arrange
    timeout = 10
    cmd = ["command"]
    exp_out = "Support for handler handler_type not implemented yet"

    # NOTE(review): this autospec'd harness is never used below — confirm
    # whether it (and the patchers) were meant to apply to test_obj.
    harness = Pytest()
    harness = mock.create_autospec(harness)

    # NOTE(review): these patchers are created but never started/applied, so
    # the real methods run on test_obj. Activating them would change what the
    # assertions below exercise — confirm intent before fixing.
    mock.patch.object(Pytest, "generate_command", return_value=cmd)
    mock.patch.object(Pytest, "run_command")

    mock_platform = mock.Mock()
    mock_platform.name = "mock_platform"
    mock_platform.normalized_name = "mock_platform"

    mock_testsuite = mock.Mock(
        id="id", testcases=[], source_dir="source_dir", harness_config={}
    )
    mock_testsuite.name = "mock_testsuite"
    mock_testsuite.harness_config = {}

    # Unknown handler type triggers the "not implemented" failure path.
    handler = mock.Mock(options=mock.Mock(verbose=0), type_str="handler_type")

    outdir = tmp_path / "gtest_out"
    outdir.mkdir()

    instance = TestInstance(
        testsuite=mock_testsuite, platform=mock_platform, toolchain='zephyr', outdir=outdir
    )
    instance.handler = handler

    test_obj = Pytest()
    test_obj.configure(instance)

    # Act
    test_obj.pytest_run(timeout)
    # Assert
    assert test_obj.status == TwisterStatus.FAIL
    assert exp_out in caplog.text
661
662
# Harness names for HarnessImporter.get_harness(): default (None) and explicit.
# (The original wrote [(None), ("Test")] — parenthesized scalars, not tuples.)
TEST_DATA_6 = [None, "Test"]
664
665
@pytest.mark.parametrize("name", TEST_DATA_6, ids=["no name", "provided name"])
def test_get_harness(name):
    """HarnessImporter.get_harness() yields a Test harness for both a missing
    and an explicitly provided name."""
    importer = HarnessImporter()

    harness = importer.get_harness(name)

    assert isinstance(harness, Test)
677
678
# Cases for test_test_handle. Each tuple is:
#   (detailed_test_id flag, expected log fragment, input console line,
#    expected detected_suite_names, expected started_suites,
#    expected started_cases, expected status, ztest flag,
#    initial harness state/status).
TEST_DATA_7 = [
    (
        True,
        "",
        "Running TESTSUITE suite_name",
        ["suite_name"],
        { 'suite_name': { 'count': 1, 'repeat': 0 } },
        {},
        TwisterStatus.NONE,
        True,
        TwisterStatus.NONE,
    ),
    (
        True,
        "TC_START: Ztest case 'testcase' is not known in {} running suite(s)",
        "START - test_testcase",
        [],
        {},
        { 'dummy.test_id.testcase': { 'count': 1 } },
        TwisterStatus.STARTED,
        True,
        TwisterStatus.NONE
    ),
    (
        True,
        "TC_END: Ztest case 'example' is not known in {} running suite(s)",
        "PASS - test_example in 0 seconds",
        [],
        {},
        {},
        TwisterStatus.PASS,
        True,
        TwisterStatus.NONE,
    ),
    (
        True,
        "TC_END: Ztest case 'example' is not known in {} running suite(s)",
        "SKIP - test_example in 0 seconds",
        [],
        {},
        {},
        TwisterStatus.SKIP,
        True,
        TwisterStatus.NONE,
    ),
    (
        True,
        "TC_END: Ztest case 'example' is not known in {} running suite(s)",
        "FAIL - test_example in 0 seconds",
        [],
        {},
        {},
        TwisterStatus.FAIL,
        True,
        TwisterStatus.NONE,
    ),
    (
        True,
        "not a ztest and no state for dummy.test_id",
        "START - test_testcase",
        [],
        {},
        { 'dummy.test_id.testcase': { 'count': 1 } },
        TwisterStatus.PASS,
        False,
        TwisterStatus.PASS,
    ),
    (
        False,
        "not a ztest and no state for dummy.test_id",
        "START - test_testcase",
        [],
        {},
        { 'testcase': { 'count': 1 } },
        TwisterStatus.PASS,
        False,
        TwisterStatus.PASS,
    ),
    (
        True,
        "not a ztest and no state for dummy.test_id",
        "START - test_testcase",
        [],
        {},
        { 'dummy.test_id.testcase': { 'count': 1 } },
        TwisterStatus.FAIL,
        False,
        TwisterStatus.FAIL,
    ),
]
769
770
@pytest.mark.parametrize(
    "detailed_id, exp_out, line, exp_suite_name, exp_started_suites, exp_started_cases, exp_status, ztest, state",
    TEST_DATA_7,
    ids=["testsuite", "testcase", "pass", "skip", "failed", "ztest pass", "ztest pass short id", "ztest fail"],
)
def test_test_handle(
    tmp_path, caplog, detailed_id, exp_out, line,
    exp_suite_name, exp_started_suites, exp_started_cases,
    exp_status, ztest, state
):
    """Test.handle() tracks ztest suite/case start and end lines, updating
    detected suites, started suites/cases and the resulting case status."""
    # Arrange
    # (removed a no-op "line = line" self-assignment from the original)
    mock_platform = mock.Mock()
    mock_platform.name = "mock_platform"
    mock_platform.normalized_name = "mock_platform"

    mock_testsuite = mock.Mock(id="dummy.test_id", testcases=[])
    mock_testsuite.name = "dummy_suite/dummy.test_id"
    mock_testsuite.harness_config = {}
    mock_testsuite.ztest_suite_names = []
    mock_testsuite.detailed_test_id = detailed_id
    mock_testsuite.source_dir_rel = "dummy_suite"
    mock_testsuite.compose_case_name.return_value = TestSuite.compose_case_name_(mock_testsuite, "testcase")

    outdir = tmp_path / "ztest_out"
    with mock.patch('twisterlib.testsuite.TestSuite.get_unique', return_value="dummy_suite"):
        instance = TestInstance(
            testsuite=mock_testsuite, platform=mock_platform, toolchain='zephyr', outdir=outdir
        )
    instance.handler = mock.Mock(options=mock.Mock(verbose=0), type_str="handler_type")

    test_obj = Test()
    test_obj.configure(instance)
    test_obj.id = "dummy.test_id"
    test_obj.ztest = ztest
    test_obj.status = state
    test_obj.started_cases = {}

    # Act
    test_obj.handle(line)

    # Assert
    assert test_obj.detected_suite_names == exp_suite_name
    assert test_obj.started_suites == exp_started_suites
    assert test_obj.started_cases == exp_started_cases

    assert exp_out in caplog.text
    # Idiom fix: "not X in Y" -> "X not in Y" (same truth value).
    if "Running" not in line and exp_out == "":
        assert test_obj.instance.testcases[0].status == exp_status
    if "ztest" in exp_out:
        assert test_obj.instance.testcases[1].status == exp_status
822
823
@pytest.fixture
def gtest(tmp_path):
    """Return a Gtest harness configured against a minimal mocked TestInstance."""
    platform = mock.Mock()
    platform.name = "mock_platform"
    platform.normalized_name = "mock_platform"

    testsuite = mock.Mock()
    testsuite.name = "mock_testsuite"
    testsuite.detailed_test_id = True
    testsuite.id = "id"
    testsuite.testcases = []
    testsuite.harness_config = {}

    out_dir = tmp_path / "gtest_out"
    out_dir.mkdir()

    instance = TestInstance(
        testsuite=testsuite, platform=platform, toolchain='zephyr', outdir=out_dir
    )

    gtest_harness = Gtest()
    gtest_harness.configure(instance)
    return gtest_harness
845
846
def test_gtest_start_test_no_suites_detected(gtest):
    """A bare gtest start banner detects no suites and leaves status unset."""
    gtest.handle(SAMPLE_GTEST_START)
    assert gtest.detected_suite_names == []
    assert gtest.status == TwisterStatus.NONE
851
852
def test_gtest_start_test(gtest):
    """A RUN line registers the suite and marks the case STARTED."""
    case_name = "id.suite_name.test_name"
    for log_line in (
        SAMPLE_GTEST_START,
        SAMPLE_GTEST_FMT.format(
            state=GTEST_START_STATE, suite="suite_name", test="test_name"
        ),
    ):
        gtest.handle(log_line)

    assert gtest.status == TwisterStatus.NONE
    assert gtest.detected_suite_names == ["suite_name"]
    case = gtest.instance.get_case_by_name(case_name)
    assert case is not None
    assert case.status == TwisterStatus.STARTED
871
872
def test_gtest_pass(gtest):
    """An OK line after RUN marks the case PASS; overall status stays unset."""
    case_name = "id.suite_name.test_name"
    for log_line in (
        SAMPLE_GTEST_START,
        SAMPLE_GTEST_FMT.format(
            state=GTEST_START_STATE, suite="suite_name", test="test_name"
        ),
        SAMPLE_GTEST_FMT.format(
            state=GTEST_PASS_STATE, suite="suite_name", test="test_name"
        ),
    ):
        gtest.handle(log_line)

    assert gtest.status == TwisterStatus.NONE
    assert gtest.detected_suite_names == ["suite_name"]
    assert gtest.instance.get_case_by_name(case_name) != TwisterStatus.NONE
    assert gtest.instance.get_case_by_name(case_name).status == TwisterStatus.PASS
896
897
def test_gtest_failed(gtest):
    """A FAILED line after RUN marks the case FAIL; overall status stays unset."""
    case_name = "id.suite_name.test_name"
    for log_line in (
        SAMPLE_GTEST_START,
        SAMPLE_GTEST_FMT.format(
            state=GTEST_START_STATE, suite="suite_name", test="test_name"
        ),
        SAMPLE_GTEST_FMT.format(
            state=GTEST_FAIL_STATE, suite="suite_name", test="test_name"
        ),
    ):
        gtest.handle(log_line)

    assert gtest.status == TwisterStatus.NONE
    assert gtest.detected_suite_names == ["suite_name"]
    assert gtest.instance.get_case_by_name(case_name) != TwisterStatus.NONE
    assert gtest.instance.get_case_by_name(case_name).status == TwisterStatus.FAIL
921
922
def test_gtest_skipped(gtest):
    """A DISABLED line after RUN marks the case SKIP; overall status stays unset."""
    case_name = "id.suite_name.test_name"
    for log_line in (
        SAMPLE_GTEST_START,
        SAMPLE_GTEST_FMT.format(
            state=GTEST_START_STATE, suite="suite_name", test="test_name"
        ),
        SAMPLE_GTEST_FMT.format(
            state=GTEST_SKIP_STATE, suite="suite_name", test="test_name"
        ),
    ):
        gtest.handle(log_line)

    assert gtest.status == TwisterStatus.NONE
    assert gtest.detected_suite_names == ["suite_name"]
    assert gtest.instance.get_case_by_name(case_name) != TwisterStatus.NONE
    assert gtest.instance.get_case_by_name(case_name).status == TwisterStatus.SKIP
946
947
def test_gtest_all_pass(gtest):
    """The end-of-run banner promotes the overall harness status to PASS."""
    case_name = "id.suite_name.test_name"
    for log_line in (
        SAMPLE_GTEST_START,
        SAMPLE_GTEST_FMT.format(
            state=GTEST_START_STATE, suite="suite_name", test="test_name"
        ),
        SAMPLE_GTEST_FMT.format(
            state=GTEST_PASS_STATE, suite="suite_name", test="test_name"
        ),
        SAMPLE_GTEST_END,
    ):
        gtest.handle(log_line)

    assert gtest.status == TwisterStatus.PASS
    assert gtest.detected_suite_names == ["suite_name"]
    assert gtest.instance.get_case_by_name(case_name) != TwisterStatus.NONE
    assert gtest.instance.get_case_by_name(case_name).status == TwisterStatus.PASS
972
973
def test_gtest_all_pass_with_variant(gtest):
    """The tear-down banner variant also promotes the overall status to PASS.

    Consistency fix: compare against TwisterStatus.PASS like every other
    assertion in this file, instead of the bare "passed" string.
    """
    process_logs(
        gtest,
        [
            SAMPLE_GTEST_START,
            SAMPLE_GTEST_FMT.format(
                state=GTEST_START_STATE, suite="suite_name", test="test_name"
            ),
            SAMPLE_GTEST_FMT.format(
                state=GTEST_PASS_STATE, suite="suite_name", test="test_name"
            ),
            SAMPLE_GTEST_END_VARIANT,
        ],
    )
    assert gtest.status == TwisterStatus.PASS
    assert len(gtest.detected_suite_names) == 1
    assert gtest.detected_suite_names[0] == "suite_name"
    assert gtest.instance.get_case_by_name("id.suite_name.test_name") is not None
    assert (
        gtest.instance.get_case_by_name("id.suite_name.test_name").status
        == TwisterStatus.PASS
    )
993
994
def test_gtest_one_skipped(gtest):
    """One passed and one disabled case still yield an overall PASS."""
    for log_line in (
        SAMPLE_GTEST_START,
        SAMPLE_GTEST_FMT.format(
            state=GTEST_START_STATE, suite="suite_name", test="test_name"
        ),
        SAMPLE_GTEST_FMT.format(
            state=GTEST_PASS_STATE, suite="suite_name", test="test_name"
        ),
        SAMPLE_GTEST_FMT.format(
            state=GTEST_START_STATE, suite="suite_name", test="test_name1"
        ),
        SAMPLE_GTEST_FMT.format(
            state=GTEST_SKIP_STATE, suite="suite_name", test="test_name1"
        ),
        SAMPLE_GTEST_END,
    ):
        gtest.handle(log_line)

    assert gtest.status == TwisterStatus.PASS
    assert gtest.detected_suite_names == ["suite_name"]
    passed_case = gtest.instance.get_case_by_name("id.suite_name.test_name")
    skipped_case = gtest.instance.get_case_by_name("id.suite_name.test_name1")
    assert passed_case != TwisterStatus.NONE
    assert passed_case.status == TwisterStatus.PASS
    assert skipped_case != TwisterStatus.NONE
    assert skipped_case.status == TwisterStatus.SKIP
1033
1034
def test_gtest_one_fail(gtest):
    """One passed test plus one failed test yields an overall FAIL, with
    per-case statuses PASS and FAIL respectively."""
    process_logs(
        gtest,
        [
            SAMPLE_GTEST_START,
            SAMPLE_GTEST_FMT.format(
                state=GTEST_START_STATE, suite="suite_name", test="test0"
            ),
            SAMPLE_GTEST_FMT.format(
                state=GTEST_PASS_STATE, suite="suite_name", test="test0"
            ),
            SAMPLE_GTEST_FMT.format(
                state=GTEST_START_STATE, suite="suite_name", test="test1"
            ),
            SAMPLE_GTEST_FMT.format(
                state=GTEST_FAIL_STATE, suite="suite_name", test="test1"
            ),
            SAMPLE_GTEST_END,
        ],
    )
    assert gtest.status == TwisterStatus.FAIL
    assert len(gtest.detected_suite_names) == 1
    assert gtest.detected_suite_names[0] == "suite_name"
    # get_case_by_name() returns the case object or None; the previous
    # `!= TwisterStatus.NONE` comparison was vacuous (always true), so
    # check for None explicitly.
    assert gtest.instance.get_case_by_name("id.suite_name.test0") is not None
    assert (
        gtest.instance.get_case_by_name("id.suite_name.test0").status
        == TwisterStatus.PASS
    )
    assert gtest.instance.get_case_by_name("id.suite_name.test1") is not None
    assert (
        gtest.instance.get_case_by_name("id.suite_name.test1").status
        == TwisterStatus.FAIL
    )
1068
1069
def test_gtest_one_fail_with_variant(gtest):
    """A failing run that ends with the tear-down banner is still detected
    as an overall FAIL, with correct per-case statuses."""
    process_logs(
        gtest,
        [
            SAMPLE_GTEST_START,
            SAMPLE_GTEST_FMT.format(
                state=GTEST_START_STATE, suite="suite_name", test="test0"
            ),
            SAMPLE_GTEST_FMT.format(
                state=GTEST_PASS_STATE, suite="suite_name", test="test0"
            ),
            SAMPLE_GTEST_FMT.format(
                state=GTEST_START_STATE, suite="suite_name", test="test1"
            ),
            SAMPLE_GTEST_FMT.format(
                state=GTEST_FAIL_STATE, suite="suite_name", test="test1"
            ),
            SAMPLE_GTEST_END_VARIANT,
        ],
    )
    # Use TwisterStatus enum members rather than bare "failed"/"passed"
    # literals, for consistency with the other tests in this file.
    assert gtest.status == TwisterStatus.FAIL
    assert len(gtest.detected_suite_names) == 1
    assert gtest.detected_suite_names[0] == "suite_name"
    assert gtest.instance.get_case_by_name("id.suite_name.test0") is not None
    assert (
        gtest.instance.get_case_by_name("id.suite_name.test0").status
        == TwisterStatus.PASS
    )
    assert gtest.instance.get_case_by_name("id.suite_name.test1") is not None
    assert (
        gtest.instance.get_case_by_name("id.suite_name.test1").status
        == TwisterStatus.FAIL
    )
1097
1098
def test_gtest_one_fail_with_variant_and_param(gtest):
    """A failure line carrying a GetParam() suffix (parameterized gTest)
    is still parsed and recorded as a FAIL."""
    process_logs(
        gtest,
        [
            SAMPLE_GTEST_START,
            SAMPLE_GTEST_FMT.format(
                state=GTEST_START_STATE, suite="suite_name", test="test0"
            ),
            SAMPLE_GTEST_FMT.format(
                state=GTEST_PASS_STATE, suite="suite_name", test="test0"
            ),
            SAMPLE_GTEST_FMT.format(
                state=GTEST_START_STATE, suite="suite_name", test="test1"
            ),
            SAMPLE_GTEST_FMT_FAIL_WITH_PARAM.format(
                state=GTEST_FAIL_STATE, suite="suite_name", test="test1"
            ),
            SAMPLE_GTEST_END_VARIANT,
        ],
    )
    # Use TwisterStatus enum members rather than bare "failed"/"passed"
    # literals, for consistency with the other tests in this file.
    assert gtest.status == TwisterStatus.FAIL
    assert len(gtest.detected_suite_names) == 1
    assert gtest.detected_suite_names[0] == "suite_name"
    assert gtest.instance.get_case_by_name("id.suite_name.test0") is not None
    assert (
        gtest.instance.get_case_by_name("id.suite_name.test0").status
        == TwisterStatus.PASS
    )
    assert gtest.instance.get_case_by_name("id.suite_name.test1") is not None
    assert (
        gtest.instance.get_case_by_name("id.suite_name.test1").status
        == TwisterStatus.FAIL
    )
1126
1127
def test_gtest_missing_result(gtest):
    """Starting a second test before the first one reported any result
    must trigger the harness's 'didn't finish' assertion."""
    log_lines = [SAMPLE_GTEST_START]
    for case in ("test0", "test1"):
        log_lines.append(
            SAMPLE_GTEST_FMT.format(
                state=GTEST_START_STATE, suite="suite_name", test=case
            )
        )
    with pytest.raises(
        AssertionError,
        match=r"gTest error, id.suite_name.test0 didn't finish",
    ):
        process_logs(gtest, log_lines)
1145
1146
def test_gtest_mismatch_result(gtest):
    """A result line for a test other than the one currently running must
    trigger the harness's 'mismatched tests' assertion."""
    log_lines = [
        SAMPLE_GTEST_START,
        SAMPLE_GTEST_FMT.format(
            state=GTEST_START_STATE, suite="suite_name", test="test0"
        ),
        SAMPLE_GTEST_FMT.format(
            state=GTEST_PASS_STATE, suite="suite_name", test="test1"
        ),
    ]
    expected_error = (
        r"gTest error, mismatched tests. Expected id.suite_name.test0 but got None"
    )
    with pytest.raises(AssertionError, match=expected_error):
        process_logs(gtest, log_lines)
1164
1165
def test_gtest_repeated_result(gtest):
    """A duplicate result line for an already-finished test must trigger
    the harness's 'mismatched tests' assertion."""

    def line(state, case):
        # Build one gTest log line for the fixed 'suite_name' suite.
        return SAMPLE_GTEST_FMT.format(state=state, suite="suite_name", test=case)

    log_lines = [
        SAMPLE_GTEST_START,
        line(GTEST_START_STATE, "test0"),
        line(GTEST_PASS_STATE, "test0"),
        line(GTEST_START_STATE, "test1"),
        line(GTEST_PASS_STATE, "test0"),
    ]
    with pytest.raises(
        AssertionError,
        match=r"gTest error, mismatched tests. Expected id.suite_name.test1 but got id.suite_name.test0",
    ):
        process_logs(gtest, log_lines)
1189
1190
def test_gtest_repeated_run(gtest):
    """Seeing the same test start twice must trigger the harness's
    'running twice' assertion."""

    def line(state):
        # Build one gTest log line for the fixed suite/test pair.
        return SAMPLE_GTEST_FMT.format(state=state, suite="suite_name", test="test0")

    log_lines = [
        SAMPLE_GTEST_START,
        line(GTEST_START_STATE),
        line(GTEST_PASS_STATE),
        line(GTEST_START_STATE),
    ]
    with pytest.raises(
        AssertionError,
        match=r"gTest error, id.suite_name.test0 running twice",
    ):
        process_logs(gtest, log_lines)
1211
1212
def test_bsim_build(monkeypatch, tmp_path):
    """Bsim.build() should copy the built zephyr.exe into $BSIM_OUT_PATH/bin
    under a 'bs_' name derived from the instance name, with '/' and '.'
    replaced by '_' (presumably to satisfy bsim naming; confirmed here only
    for this one instance name).

    Uses pathlib throughout (tmp_path is already a Path) and explicit UTF-8
    encoding for the file round-trip (PEP 597).
    """
    mocked_instance = mock.Mock()
    build_dir = tmp_path / "build_dir"
    build_dir.mkdir()
    mocked_instance.build_dir = str(build_dir)
    mocked_instance.name = "platform_name/test/dummy.test"
    mocked_instance.testsuite.harness_config = {}

    harness = Bsim()
    harness.instance = mocked_instance

    # The harness locates the bsim output tree via this environment variable.
    monkeypatch.setenv("BSIM_OUT_PATH", str(tmp_path))
    (tmp_path / "bin").mkdir(exist_ok=True)
    zephyr_exe_path = build_dir / "zephyr" / "zephyr.exe"
    zephyr_exe_path.parent.mkdir(parents=True, exist_ok=True)
    zephyr_exe_path.write_text("TEST_EXE", encoding="utf-8")

    harness.build()

    new_exe_path = tmp_path / "bin" / "bs_platform_name_test_dummy_test"
    assert new_exe_path.exists()
    assert "TEST_EXE" in new_exe_path.read_text(encoding="utf-8")
1238