1#!/usr/bin/env python3
2
3# Copyright(c) 2023 Google LLC
4# SPDX-License-Identifier: Apache-2.0
5
6"""
7This test file contains testsuites for the Harness classes of twister
8"""
9import mock
10import sys
11import os
12import pytest
13import re
14import logging as logger
15
16# ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
17from conftest import ZEPHYR_BASE
18
19sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))
20
21from twisterlib.harness import (
22    Bsim,
23    Console,
24    Gtest,
25    Harness,
26    HarnessImporter,
27    Pytest,
28    PytestHarnessException,
29    Robot,
30    Test,
31)
32from twisterlib.statuses import TwisterStatus
33from twisterlib.testsuite import TestSuite
34from twisterlib.testinstance import TestInstance
35
36GTEST_START_STATE = " RUN      "
37GTEST_PASS_STATE = "       OK "
38GTEST_SKIP_STATE = " DISABLED "
39GTEST_FAIL_STATE = "  FAILED  "
40SAMPLE_GTEST_START = (
41    "[00:00:00.000,000] <inf> label:  [==========] Running all tests."
42)
43SAMPLE_GTEST_FMT = (
44    "[00:00:00.000,000] <inf> label:  [{state}] {suite}.{test} (0ms)"
45)
46SAMPLE_GTEST_FMT_FAIL_WITH_PARAM = (
47    "[00:00:00.000,000] <inf> label:  "
48    + "[{state}] {suite}.{test}, where GetParam() = 8-byte object <0B-00 00-00 00-9A 80-F7> (0 ms total)"
49)
50SAMPLE_GTEST_END = (
51    "[00:00:00.000,000] <inf> label:  [==========] Done running all tests."
52)
53SAMPLE_GTEST_END_VARIANT = (
54    "[00:00:00.000,000] <inf> label:  [----------] Global test environment tear-down"
55)
56
57
58def process_logs(harness, logs):
59    for line in logs:
60        harness.handle(line)
61
62
63TEST_DATA_RECORDING = [
64    ([""], "^START:(?P<foo>.*):END", [], None),
65    (["START:bar:STOP"], "^START:(?P<foo>.*):END", [], None),
66    (["START:bar:END"], "^START:(?P<foo>.*):END", [{"foo": "bar"}], None),
67    (
68        ["START:bar:baz:END"],
69        "^START:(?P<foo>.*):(?P<boo>.*):END",
70        [{"foo": "bar", "boo": "baz"}],
71        None,
72    ),
73    (
74        ["START:bar:baz:END", "START:may:jun:END"],
75        "^START:(?P<foo>.*):(?P<boo>.*):END",
76        [{"foo": "bar", "boo": "baz"}, {"foo": "may", "boo": "jun"}],
77        None,
78    ),
79    (["START:bar:END"], "^START:(?P<foo>.*):END", [{"foo": "bar"}], []),
80    (["START:bar:END"], "^START:(?P<foo>.*):END", [{"foo": "bar"}], ["boo"]),
81    (
82        ["START:bad_json:END"],
83        "^START:(?P<foo>.*):END",
84        [
85            {
86                "foo": {
87                    "ERROR": {
88                        "msg": "Expecting value: line 1 column 1 (char 0)",
89                        "doc": "bad_json",
90                    }
91                }
92            }
93        ],
94        ["foo"],
95    ),
96    (["START::END"], "^START:(?P<foo>.*):END", [{"foo": {}}], ["foo"]),
97    (
98        ['START: {"one":1, "two":2} :END'],
99        "^START:(?P<foo>.*):END",
100        [{"foo": {"one": 1, "two": 2}}],
101        ["foo"],
102    ),
103    (
104        ['START: {"one":1, "two":2} :STOP:oops:END'],
105        "^START:(?P<foo>.*):STOP:(?P<boo>.*):END",
106        [{"foo": {"one": 1, "two": 2}, "boo": "oops"}],
107        ["foo"],
108    ),
109    (
110        ['START: {"one":1, "two":2} :STOP:{"oops":0}:END'],
111        "^START:(?P<foo>.*):STOP:(?P<boo>.*):END",
112        [{"foo": {"one": 1, "two": 2}, "boo": {"oops": 0}}],
113        ["foo", "boo"],
114    ),
115]
116
117
118@pytest.mark.parametrize(
119    "lines, pattern, expected_records, as_json",
120    TEST_DATA_RECORDING,
121    ids=[
122        "empty",
123        "no match",
124        "match 1 field",
125        "match 2 fields",
126        "match 2 records",
127        "as_json empty",
128        "as_json no such field",
129        "error parsing json",
130        "empty json value",
131        "simple json",
132        "plain field and json field",
133        "two json fields",
134    ],
135)
136def test_harness_parse_record(lines, pattern, expected_records, as_json):
137    harness = Harness()
138    harness.record = {"regex": pattern}
139    harness.record_pattern = re.compile(pattern)
140
141    harness.record_as_json = as_json
142    if as_json is not None:
143        harness.record["as_json"] = as_json
144
145    assert not harness.recording
146
147    for line in lines:
148        harness.parse_record(line)
149
150    assert harness.recording == expected_records
151
152
153TEST_DATA_1 = [
154    ("RunID: 12345", False, False, False, TwisterStatus.NONE, True),
155    ("PROJECT EXECUTION SUCCESSFUL", False, False, False, TwisterStatus.PASS, False),
156    ("PROJECT EXECUTION SUCCESSFUL", True, False, False, TwisterStatus.FAIL, False),
157    ("PROJECT EXECUTION FAILED", False, False, False, TwisterStatus.FAIL, False),
158    ("ZEPHYR FATAL ERROR", False, True, False, TwisterStatus.NONE, False),
159    ("GCOV_COVERAGE_DUMP_START", None, None, True, TwisterStatus.NONE, False),
160    ("GCOV_COVERAGE_DUMP_END", None, None, False, TwisterStatus.NONE, False),
161]
162
163
164@pytest.mark.parametrize(
165    "line, fault, fail_on_fault, cap_cov, exp_stat, exp_id",
166    TEST_DATA_1,
167    ids=[
168        "match id",
169        "passed passed",
170        "passed failed",
171        "failed failed",
172        "fail on fault",
173        "GCOV START",
174        "GCOV END",
175    ],
176)
177def test_harness_process_test(line, fault, fail_on_fault, cap_cov, exp_stat, exp_id):
178    # Arrange
179    harness = Harness()
180    harness.run_id = 12345
181    harness.status = TwisterStatus.NONE
182    harness.fault = fault
183    harness.fail_on_fault = fail_on_fault
184    mock.patch.object(Harness, "parse_record", return_value=None)
185
186    # Act
187    harness.process_test(line)
188
189    # Assert
190    assert harness.matched_run_id == exp_id
191    assert harness.status == exp_stat
192    assert harness.capture_coverage == cap_cov
193    assert harness.recording == []
194
195
196def test_robot_configure(tmp_path):
197    # Arrange
198    mock_platform = mock.Mock()
199    mock_platform.name = "mock_platform"
200    mock_platform.normalized_name = "mock_platform"
201
202    mock_testsuite = mock.Mock(id="id", testcases=[])
203    mock_testsuite.name = "mock_testsuite"
204    mock_testsuite.harness_config = {}
205
206    outdir = tmp_path / "gtest_out"
207    outdir.mkdir()
208
209    instance = TestInstance(
210        testsuite=mock_testsuite, platform=mock_platform, outdir=outdir
211    )
212    instance.testsuite.harness_config = {
213        "robot_testsuite": "/path/to/robot/test",
214        "robot_option": "test_option",
215    }
216    robot_harness = Robot()
217
218    # Act
219    robot_harness.configure(instance)
220
221    # Assert
222    assert robot_harness.instance == instance
223    assert robot_harness.path == "/path/to/robot/test"
224    assert robot_harness.option == "test_option"
225
226
227def test_robot_handle(tmp_path):
228    # Arrange
229    mock_platform = mock.Mock()
230    mock_platform.name = "mock_platform"
231    mock_platform.normalized_name = "mock_platform"
232
233    mock_testsuite = mock.Mock(id="id", testcases=[])
234    mock_testsuite.name = "mock_testsuite"
235    mock_testsuite.harness_config = {}
236
237    outdir = tmp_path / "gtest_out"
238    outdir.mkdir()
239
240    instance = TestInstance(
241        testsuite=mock_testsuite, platform=mock_platform, outdir=outdir
242    )
243
244    handler = Robot()
245    handler.instance = instance
246    handler.id = "test_case_1"
247
248    line = "Test case passed"
249
250    # Act
251    handler.handle(line)
252    tc = instance.get_case_or_create("test_case_1")
253
254    # Assert
255    assert instance.status == TwisterStatus.PASS
256    assert tc.status == TwisterStatus.PASS
257
258
259TEST_DATA_2 = [
260    ("", 0, TwisterStatus.PASS),
261    ("Robot test failure: sourcedir for mock_platform", 1, TwisterStatus.FAIL),
262]
263
264
265@pytest.mark.parametrize(
266    "exp_out, returncode, expected_status", TEST_DATA_2, ids=["passed", "failed"]
267)
268def test_robot_run_robot_test(tmp_path, caplog, exp_out, returncode, expected_status):
269    # Arrange
270    command = ["command"]
271
272    handler = mock.Mock()
273    handler.sourcedir = "sourcedir"
274    handler.log = "handler.log"
275
276    path = "path"
277    option = "option"
278
279    mock_platform = mock.Mock()
280    mock_platform.name = "mock_platform"
281    mock_platform.normalized_name = "mock_platform"
282
283    mock_testsuite = mock.Mock(id="id", testcases=[mock.Mock()])
284    mock_testsuite.name = "mock_testsuite"
285    mock_testsuite.harness_config = {}
286
287    outdir = tmp_path / "gtest_out"
288    outdir.mkdir()
289
290    instance = TestInstance(
291        testsuite=mock_testsuite, platform=mock_platform, outdir=outdir
292    )
293    instance.build_dir = "build_dir"
294
295    open_mock = mock.mock_open()
296
297    robot = Robot()
298    robot.path = path
299    robot.option = option
300    robot.instance = instance
301    proc_mock = mock.Mock(
302        returncode=returncode, communicate=mock.Mock(return_value=(b"output", None))
303    )
304    popen_mock = mock.Mock(
305        return_value=mock.Mock(
306            __enter__=mock.Mock(return_value=proc_mock), __exit__=mock.Mock()
307        )
308    )
309
310    # Act
311    with mock.patch("subprocess.Popen", popen_mock) as mock.mock_popen, mock.patch(
312        "builtins.open", open_mock
313    ):
314        robot.run_robot_test(command, handler)
315
316    # Assert
317    assert instance.status == expected_status
318    open_mock().write.assert_called_once_with("output")
319    assert exp_out in caplog.text
320
321
322TEST_DATA_3 = [
323    ("one_line", None),
324    ("multi_line", 2),
325]
326
327
328@pytest.mark.parametrize(
329    "type, num_patterns", TEST_DATA_3, ids=["one line", "multi line"]
330)
331def test_console_configure(tmp_path, type, num_patterns):
332    # Arrange
333    mock_platform = mock.Mock()
334    mock_platform.name = "mock_platform"
335    mock_platform.normalized_name = "mock_platform"
336
337    mock_testsuite = mock.Mock(id="id", testcases=[])
338    mock_testsuite.name = "mock_testsuite"
339    mock_testsuite.harness_config = {}
340
341    outdir = tmp_path / "gtest_out"
342    outdir.mkdir()
343
344    instance = TestInstance(
345        testsuite=mock_testsuite, platform=mock_platform, outdir=outdir
346    )
347    instance.testsuite.harness_config = {
348        "type": type,
349        "regex": ["pattern1", "pattern2"],
350    }
351    console = Console()
352
353    # Act
354    console.configure(instance)
355
356    # Assert
357    if num_patterns == 2:
358        assert len(console.patterns) == num_patterns
359        assert [pattern.pattern for pattern in console.patterns] == [
360            "pattern1",
361            "pattern2",
362        ]
363    else:
364        assert console.pattern.pattern == "pattern1"
365
366
367TEST_DATA_4 = [
368    ("one_line", True, TwisterStatus.PASS, "line", False, False),
369    ("multi_line", True, TwisterStatus.PASS, "line", False, False),
370    ("multi_line", False, TwisterStatus.PASS, "line", False, False),
371    ("invalid_type", False, TwisterStatus.NONE, "line", False, False),
372    ("invalid_type", False, TwisterStatus.NONE, "ERROR", True, False),
373    ("invalid_type", False, TwisterStatus.NONE, "COVERAGE_START", False, True),
374    ("invalid_type", False, TwisterStatus.NONE, "COVERAGE_END", False, False),
375]
376
377
378@pytest.mark.parametrize(
379    "line_type, ordered_val, exp_state, line, exp_fault, exp_capture",
380    TEST_DATA_4,
381    ids=[
382        "one line",
383        "multi line ordered",
384        "multi line not ordered",
385        "logger error",
386        "fail on fault",
387        "GCOV START",
388        "GCOV END",
389    ],
390)
391def test_console_handle(
392    tmp_path, line_type, ordered_val, exp_state, line, exp_fault, exp_capture
393):
394    mock_platform = mock.Mock()
395    mock_platform.name = "mock_platform"
396    mock_platform.normalized_name = "mock_platform"
397
398    mock_testsuite = mock.Mock(id="id", testcases=[])
399    mock_testsuite.name = "mock_testsuite"
400    mock_testsuite.harness_config = {}
401
402    outdir = tmp_path / "gtest_out"
403    outdir.mkdir()
404
405    instance = TestInstance(
406        testsuite=mock_testsuite, platform=mock_platform, outdir=outdir
407    )
408
409    console = Console()
410    console.instance = instance
411    console.type = line_type
412    console.patterns = [re.compile("pattern1"), re.compile("pattern2")]
413    console.pattern = re.compile("pattern")
414    console.patterns_expected = 0
415    console.status = TwisterStatus.NONE
416    console.fail_on_fault = True
417    console.FAULT = "ERROR"
418    console.GCOV_START = "COVERAGE_START"
419    console.GCOV_END = "COVERAGE_END"
420    console.record = {"regex": "RESULT: (.*)"}
421    console.fieldnames = []
422    console.recording = []
423    console.regex = ["regex1", "regex2"]
424    console.id = "test_case_1"
425
426    instance.get_case_or_create("test_case_1")
427    instance.testsuite.id = "test_suite_1"
428
429    console.next_pattern = 0
430    console.ordered = ordered_val
431    line = line
432    console.handle(line)
433
434    line1 = "pattern1"
435    line2 = "pattern2"
436    console.handle(line1)
437    console.handle(line2)
438    assert console.status == exp_state
439    with pytest.raises(Exception):
440        console.handle(line)
441        assert logger.error.called
442    assert console.fault == exp_fault
443    assert console.capture_coverage == exp_capture
444
445
446TEST_DATA_5 = [("serial_pty", 0), (None, 0), (None, 1)]
447
448
449@pytest.mark.parametrize(
450    "pty_value, hardware_value",
451    TEST_DATA_5,
452    ids=["hardware pty", "hardware", "non hardware"],
453)
454def test_pytest__generate_parameters_for_hardware(tmp_path, pty_value, hardware_value):
455    # Arrange
456    mock_platform = mock.Mock()
457    mock_platform.name = "mock_platform"
458    mock_platform.normalized_name = "mock_platform"
459
460    mock_testsuite = mock.Mock(id="id", testcases=[])
461    mock_testsuite.name = "mock_testsuite"
462    mock_testsuite.harness_config = {}
463
464    outdir = tmp_path / "gtest_out"
465    outdir.mkdir()
466
467    instance = TestInstance(
468        testsuite=mock_testsuite, platform=mock_platform, outdir=outdir
469    )
470
471    handler = mock.Mock()
472    handler.instance = instance
473
474    hardware = mock.Mock()
475    hardware.serial_pty = pty_value
476    hardware.serial = "serial"
477    hardware.baud = 115200
478    hardware.runner = "runner"
479    hardware.runner_params = ["--runner-param1", "runner-param2"]
480    hardware.fixtures = ["fixture1:option1", "fixture2"]
481
482    options = handler.options
483    options.west_flash = "args"
484
485    hardware.probe_id = "123"
486    hardware.product = "product"
487    hardware.pre_script = "pre_script"
488    hardware.post_flash_script = "post_flash_script"
489    hardware.post_script = "post_script"
490
491    pytest_test = Pytest()
492    pytest_test.configure(instance)
493
494    # Act
495    if hardware_value == 0:
496        handler.get_hardware.return_value = hardware
497        command = pytest_test._generate_parameters_for_hardware(handler)
498    else:
499        handler.get_hardware.return_value = None
500
501    # Assert
502    if hardware_value == 1:
503        with pytest.raises(PytestHarnessException) as exinfo:
504            pytest_test._generate_parameters_for_hardware(handler)
505        assert str(exinfo.value) == "Hardware is not available"
506    else:
507        assert "--device-type=hardware" in command
508        if pty_value == "serial_pty":
509            assert "--device-serial-pty=serial_pty" in command
510        else:
511            assert "--device-serial=serial" in command
512            assert "--device-serial-baud=115200" in command
513        assert "--runner=runner" in command
514        assert "--runner-params=--runner-param1" in command
515        assert "--runner-params=runner-param2" in command
516        assert "--west-flash-extra-args=args" in command
517        assert "--device-id=123" in command
518        assert "--device-product=product" in command
519        assert "--pre-script=pre_script" in command
520        assert "--post-flash-script=post_flash_script" in command
521        assert "--post-script=post_script" in command
522        assert "--twister-fixture=fixture1:option1" in command
523        assert "--twister-fixture=fixture2" in command
524
525
526def test__update_command_with_env_dependencies():
527    cmd = ["cmd"]
528    pytest_test = Pytest()
529    mock.patch.object(Pytest, "PYTEST_PLUGIN_INSTALLED", False)
530
531    # Act
532    result_cmd, _ = pytest_test._update_command_with_env_dependencies(cmd)
533
534    # Assert
535    assert result_cmd == ["cmd", "-p", "twister_harness.plugin"]
536
537
538def test_pytest_run(tmp_path, caplog):
539    # Arrange
540    timeout = 10
541    cmd = ["command"]
542    exp_out = "Support for handler handler_type not implemented yet"
543
544    harness = Pytest()
545    harness = mock.create_autospec(harness)
546
547    mock.patch.object(Pytest, "generate_command", return_value=cmd)
548    mock.patch.object(Pytest, "run_command")
549
550    mock_platform = mock.Mock()
551    mock_platform.name = "mock_platform"
552    mock_platform.normalized_name = "mock_platform"
553
554    mock_testsuite = mock.Mock(
555        id="id", testcases=[], source_dir="source_dir", harness_config={}
556    )
557    mock_testsuite.name = "mock_testsuite"
558    mock_testsuite.harness_config = {}
559
560    handler = mock.Mock(options=mock.Mock(verbose=0), type_str="handler_type")
561
562    outdir = tmp_path / "gtest_out"
563    outdir.mkdir()
564
565    instance = TestInstance(
566        testsuite=mock_testsuite, platform=mock_platform, outdir=outdir
567    )
568    instance.handler = handler
569
570    test_obj = Pytest()
571    test_obj.configure(instance)
572
573    # Act
574    test_obj.pytest_run(timeout)
575    # Assert
576    assert test_obj.status == TwisterStatus.FAIL
577    assert exp_out in caplog.text
578
579
580TEST_DATA_6 = [(None), ("Test")]
581
582
583@pytest.mark.parametrize("name", TEST_DATA_6, ids=["no name", "provided name"])
584def test_get_harness(name):
585    # Arrange
586    harnessimporter = HarnessImporter()
587    harness_name = name
588
589    # Act
590    harness_class = harnessimporter.get_harness(harness_name)
591
592    # Assert
593    assert isinstance(harness_class, Test)
594
595
596TEST_DATA_7 = [
597    (
598        True,
599        "",
600        "Running TESTSUITE suite_name",
601        ["suite_name"],
602        { 'suite_name': { 'count': 1, 'repeat': 0 } },
603        {},
604        TwisterStatus.NONE,
605        True,
606        TwisterStatus.NONE,
607    ),
608    (
609        True,
610        "On TC_START: Ztest case 'testcase' is not known in {} running suite(s)",
611        "START - test_testcase",
612        [],
613        {},
614        { 'dummy.test_id.testcase': { 'count': 1 } },
615        TwisterStatus.STARTED,
616        True,
617        TwisterStatus.NONE
618    ),
619    (
620        True,
621        "On TC_END: Ztest case 'example' is not known in {} running suite(s)",
622        "PASS - test_example in 0 seconds",
623        [],
624        {},
625        {},
626        TwisterStatus.PASS,
627        True,
628        TwisterStatus.NONE,
629    ),
630    (
631        True,
632        "On TC_END: Ztest case 'example' is not known in {} running suite(s)",
633        "SKIP - test_example in 0 seconds",
634        [],
635        {},
636        {},
637        TwisterStatus.SKIP,
638        True,
639        TwisterStatus.NONE,
640    ),
641    (
642        True,
643        "On TC_END: Ztest case 'example' is not known in {} running suite(s)",
644        "FAIL - test_example in 0 seconds",
645        [],
646        {},
647        {},
648        TwisterStatus.FAIL,
649        True,
650        TwisterStatus.NONE,
651    ),
652    (
653        True,
654        "not a ztest and no state for dummy.test_id",
655        "START - test_testcase",
656        [],
657        {},
658        { 'dummy.test_id.testcase': { 'count': 1 } },
659        TwisterStatus.PASS,
660        False,
661        TwisterStatus.PASS,
662    ),
663    (
664        False,
665        "not a ztest and no state for dummy.test_id",
666        "START - test_testcase",
667        [],
668        {},
669        { 'testcase': { 'count': 1 } },
670        TwisterStatus.PASS,
671        False,
672        TwisterStatus.PASS,
673    ),
674    (
675        True,
676        "not a ztest and no state for dummy.test_id",
677        "START - test_testcase",
678        [],
679        {},
680        { 'dummy.test_id.testcase': { 'count': 1 } },
681        TwisterStatus.FAIL,
682        False,
683        TwisterStatus.FAIL,
684    ),
685]
686
687
688@pytest.mark.parametrize(
689    "detailed_id, exp_out, line, exp_suite_name, exp_started_suites, exp_started_cases, exp_status, ztest, state",
690    TEST_DATA_7,
691    ids=["testsuite", "testcase", "pass", "skip", "failed", "ztest pass", "ztest pass short id", "ztest fail"],
692)
693def test_test_handle(
694    tmp_path, caplog, detailed_id, exp_out, line,
695    exp_suite_name, exp_started_suites, exp_started_cases,
696    exp_status, ztest, state
697):
698    # Arrange
699    line = line
700    mock_platform = mock.Mock()
701    mock_platform.name = "mock_platform"
702    mock_platform.normalized_name = "mock_platform"
703
704    mock_testsuite = mock.Mock(id="dummy.test_id", testcases=[])
705    mock_testsuite.name = "dummy_suite/dummy.test_id"
706    mock_testsuite.harness_config = {}
707    mock_testsuite.ztest_suite_names = []
708    mock_testsuite.detailed_test_id = detailed_id
709    mock_testsuite.source_dir_rel = "dummy_suite"
710    mock_testsuite.compose_case_name.return_value = TestSuite.compose_case_name_(mock_testsuite, "testcase")
711
712    outdir = tmp_path / "ztest_out"
713    with mock.patch('twisterlib.testsuite.TestSuite.get_unique', return_value="dummy_suite"):
714        instance = TestInstance(
715            testsuite=mock_testsuite, platform=mock_platform, outdir=outdir
716        )
717
718    test_obj = Test()
719    test_obj.configure(instance)
720    test_obj.id = "dummy.test_id"
721    test_obj.ztest = ztest
722    test_obj.status = state
723    test_obj.started_cases = {}
724
725    # Act
726    test_obj.handle(line)
727
728    # Assert
729    assert test_obj.detected_suite_names == exp_suite_name
730    assert test_obj.started_suites == exp_started_suites
731    assert test_obj.started_cases == exp_started_cases
732
733    assert exp_out in caplog.text
734    if not "Running" in line and exp_out == "":
735        assert test_obj.instance.testcases[0].status == exp_status
736    if "ztest" in exp_out:
737        assert test_obj.instance.testcases[1].status == exp_status
738
739
740@pytest.fixture
741def gtest(tmp_path):
742    mock_platform = mock.Mock()
743    mock_platform.name = "mock_platform"
744    mock_platform.normalized_name = "mock_platform"
745    mock_testsuite = mock.Mock()
746    mock_testsuite.name = "mock_testsuite"
747    mock_testsuite.detailed_test_id = True
748    mock_testsuite.id = "id"
749    mock_testsuite.testcases = []
750    mock_testsuite.harness_config = {}
751    outdir = tmp_path / "gtest_out"
752    outdir.mkdir()
753
754    instance = TestInstance(
755        testsuite=mock_testsuite, platform=mock_platform, outdir=outdir
756    )
757
758    harness = Gtest()
759    harness.configure(instance)
760    return harness
761
762
763def test_gtest_start_test_no_suites_detected(gtest):
764    process_logs(gtest, [SAMPLE_GTEST_START])
765    assert len(gtest.detected_suite_names) == 0
766    assert gtest.status == TwisterStatus.NONE
767
768
769def test_gtest_start_test(gtest):
770    process_logs(
771        gtest,
772        [
773            SAMPLE_GTEST_START,
774            SAMPLE_GTEST_FMT.format(
775                state=GTEST_START_STATE, suite="suite_name", test="test_name"
776            ),
777        ],
778    )
779    assert gtest.status == TwisterStatus.NONE
780    assert len(gtest.detected_suite_names) == 1
781    assert gtest.detected_suite_names[0] == "suite_name"
782    assert gtest.instance.get_case_by_name("id.suite_name.test_name") is not None
783    assert (
784        gtest.instance.get_case_by_name("id.suite_name.test_name").status
785        == TwisterStatus.STARTED
786    )
787
788
789def test_gtest_pass(gtest):
790    process_logs(
791        gtest,
792        [
793            SAMPLE_GTEST_START,
794            SAMPLE_GTEST_FMT.format(
795                state=GTEST_START_STATE, suite="suite_name", test="test_name"
796            ),
797            SAMPLE_GTEST_FMT.format(
798                state=GTEST_PASS_STATE, suite="suite_name", test="test_name"
799            ),
800        ],
801    )
802    assert gtest.status == TwisterStatus.NONE
803    assert len(gtest.detected_suite_names) == 1
804    assert gtest.detected_suite_names[0] == "suite_name"
805    assert (
806        gtest.instance.get_case_by_name("id.suite_name.test_name") != TwisterStatus.NONE
807    )
808    assert (
809        gtest.instance.get_case_by_name("id.suite_name.test_name").status
810        == TwisterStatus.PASS
811    )
812
813
814def test_gtest_failed(gtest):
815    process_logs(
816        gtest,
817        [
818            SAMPLE_GTEST_START,
819            SAMPLE_GTEST_FMT.format(
820                state=GTEST_START_STATE, suite="suite_name", test="test_name"
821            ),
822            SAMPLE_GTEST_FMT.format(
823                state=GTEST_FAIL_STATE, suite="suite_name", test="test_name"
824            ),
825        ],
826    )
827    assert gtest.status == TwisterStatus.NONE
828    assert len(gtest.detected_suite_names) == 1
829    assert gtest.detected_suite_names[0] == "suite_name"
830    assert (
831        gtest.instance.get_case_by_name("id.suite_name.test_name") != TwisterStatus.NONE
832    )
833    assert (
834        gtest.instance.get_case_by_name("id.suite_name.test_name").status
835        == TwisterStatus.FAIL
836    )
837
838
839def test_gtest_skipped(gtest):
840    process_logs(
841        gtest,
842        [
843            SAMPLE_GTEST_START,
844            SAMPLE_GTEST_FMT.format(
845                state=GTEST_START_STATE, suite="suite_name", test="test_name"
846            ),
847            SAMPLE_GTEST_FMT.format(
848                state=GTEST_SKIP_STATE, suite="suite_name", test="test_name"
849            ),
850        ],
851    )
852    assert gtest.status == TwisterStatus.NONE
853    assert len(gtest.detected_suite_names) == 1
854    assert gtest.detected_suite_names[0] == "suite_name"
855    assert (
856        gtest.instance.get_case_by_name("id.suite_name.test_name") != TwisterStatus.NONE
857    )
858    assert (
859        gtest.instance.get_case_by_name("id.suite_name.test_name").status
860        == TwisterStatus.SKIP
861    )
862
863
864def test_gtest_all_pass(gtest):
865    process_logs(
866        gtest,
867        [
868            SAMPLE_GTEST_START,
869            SAMPLE_GTEST_FMT.format(
870                state=GTEST_START_STATE, suite="suite_name", test="test_name"
871            ),
872            SAMPLE_GTEST_FMT.format(
873                state=GTEST_PASS_STATE, suite="suite_name", test="test_name"
874            ),
875            SAMPLE_GTEST_END,
876        ],
877    )
878    assert gtest.status == TwisterStatus.PASS
879    assert len(gtest.detected_suite_names) == 1
880    assert gtest.detected_suite_names[0] == "suite_name"
881    assert (
882        gtest.instance.get_case_by_name("id.suite_name.test_name") != TwisterStatus.NONE
883    )
884    assert (
885        gtest.instance.get_case_by_name("id.suite_name.test_name").status
886        == TwisterStatus.PASS
887    )
888
889
890def test_gtest_all_pass_with_variant(gtest):
891    process_logs(
892        gtest,
893        [
894            SAMPLE_GTEST_START,
895            SAMPLE_GTEST_FMT.format(
896                state=GTEST_START_STATE, suite="suite_name", test="test_name"
897            ),
898            SAMPLE_GTEST_FMT.format(
899                state=GTEST_PASS_STATE, suite="suite_name", test="test_name"
900            ),
901            SAMPLE_GTEST_END_VARIANT,
902        ],
903    )
904    assert gtest.status == "passed"
905    assert len(gtest.detected_suite_names) == 1
906    assert gtest.detected_suite_names[0] == "suite_name"
907    assert gtest.instance.get_case_by_name("id.suite_name.test_name") is not None
908    assert gtest.instance.get_case_by_name("id.suite_name.test_name").status == "passed"
909
910
911def test_gtest_one_skipped(gtest):
912    process_logs(
913        gtest,
914        [
915            SAMPLE_GTEST_START,
916            SAMPLE_GTEST_FMT.format(
917                state=GTEST_START_STATE, suite="suite_name", test="test_name"
918            ),
919            SAMPLE_GTEST_FMT.format(
920                state=GTEST_PASS_STATE, suite="suite_name", test="test_name"
921            ),
922            SAMPLE_GTEST_FMT.format(
923                state=GTEST_START_STATE, suite="suite_name", test="test_name1"
924            ),
925            SAMPLE_GTEST_FMT.format(
926                state=GTEST_SKIP_STATE, suite="suite_name", test="test_name1"
927            ),
928            SAMPLE_GTEST_END,
929        ],
930    )
931    assert gtest.status == TwisterStatus.PASS
932    assert len(gtest.detected_suite_names) == 1
933    assert gtest.detected_suite_names[0] == "suite_name"
934    assert (
935        gtest.instance.get_case_by_name("id.suite_name.test_name") != TwisterStatus.NONE
936    )
937    assert (
938        gtest.instance.get_case_by_name("id.suite_name.test_name").status
939        == TwisterStatus.PASS
940    )
941    assert (
942        gtest.instance.get_case_by_name("id.suite_name.test_name1")
943        != TwisterStatus.NONE
944    )
945    assert (
946        gtest.instance.get_case_by_name("id.suite_name.test_name1").status
947        == TwisterStatus.SKIP
948    )
949
950
951def test_gtest_one_fail(gtest):
952    process_logs(
953        gtest,
954        [
955            SAMPLE_GTEST_START,
956            SAMPLE_GTEST_FMT.format(
957                state=GTEST_START_STATE, suite="suite_name", test="test0"
958            ),
959            SAMPLE_GTEST_FMT.format(
960                state=GTEST_PASS_STATE, suite="suite_name", test="test0"
961            ),
962            SAMPLE_GTEST_FMT.format(
963                state=GTEST_START_STATE, suite="suite_name", test="test1"
964            ),
965            SAMPLE_GTEST_FMT.format(
966                state=GTEST_FAIL_STATE, suite="suite_name", test="test1"
967            ),
968            SAMPLE_GTEST_END,
969        ],
970    )
971    assert gtest.status == TwisterStatus.FAIL
972    assert len(gtest.detected_suite_names) == 1
973    assert gtest.detected_suite_names[0] == "suite_name"
974    assert gtest.instance.get_case_by_name("id.suite_name.test0") != TwisterStatus.NONE
975    assert (
976        gtest.instance.get_case_by_name("id.suite_name.test0").status
977        == TwisterStatus.PASS
978    )
979    assert gtest.instance.get_case_by_name("id.suite_name.test1") != TwisterStatus.NONE
980    assert (
981        gtest.instance.get_case_by_name("id.suite_name.test1").status
982        == TwisterStatus.FAIL
983    )
984
985
986def test_gtest_one_fail_with_variant(gtest):
987    process_logs(
988        gtest,
989        [
990            SAMPLE_GTEST_START,
991            SAMPLE_GTEST_FMT.format(
992                state=GTEST_START_STATE, suite="suite_name", test="test0"
993            ),
994            SAMPLE_GTEST_FMT.format(
995                state=GTEST_PASS_STATE, suite="suite_name", test="test0"
996            ),
997            SAMPLE_GTEST_FMT.format(
998                state=GTEST_START_STATE, suite="suite_name", test="test1"
999            ),
1000            SAMPLE_GTEST_FMT.format(
1001                state=GTEST_FAIL_STATE, suite="suite_name", test="test1"
1002            ),
1003            SAMPLE_GTEST_END_VARIANT,
1004        ],
1005    )
1006    assert gtest.status == "failed"
1007    assert len(gtest.detected_suite_names) == 1
1008    assert gtest.detected_suite_names[0] == "suite_name"
1009    assert gtest.instance.get_case_by_name("id.suite_name.test0") is not None
1010    assert gtest.instance.get_case_by_name("id.suite_name.test0").status == "passed"
1011    assert gtest.instance.get_case_by_name("id.suite_name.test1") is not None
1012    assert gtest.instance.get_case_by_name("id.suite_name.test1").status == "failed"
1013
1014
1015def test_gtest_one_fail_with_variant_and_param(gtest):
1016    process_logs(
1017        gtest,
1018        [
1019            SAMPLE_GTEST_START,
1020            SAMPLE_GTEST_FMT.format(
1021                state=GTEST_START_STATE, suite="suite_name", test="test0"
1022            ),
1023            SAMPLE_GTEST_FMT.format(
1024                state=GTEST_PASS_STATE, suite="suite_name", test="test0"
1025            ),
1026            SAMPLE_GTEST_FMT.format(
1027                state=GTEST_START_STATE, suite="suite_name", test="test1"
1028            ),
1029            SAMPLE_GTEST_FMT_FAIL_WITH_PARAM.format(
1030                state=GTEST_FAIL_STATE, suite="suite_name", test="test1"
1031            ),
1032            SAMPLE_GTEST_END_VARIANT,
1033        ],
1034    )
1035    assert gtest.status == "failed"
1036    assert len(gtest.detected_suite_names) == 1
1037    assert gtest.detected_suite_names[0] == "suite_name"
1038    assert gtest.instance.get_case_by_name("id.suite_name.test0") is not None
1039    assert gtest.instance.get_case_by_name("id.suite_name.test0").status == "passed"
1040    assert gtest.instance.get_case_by_name("id.suite_name.test1") is not None
1041    assert gtest.instance.get_case_by_name("id.suite_name.test1").status == "failed"
1042
1043
1044def test_gtest_missing_result(gtest):
1045    with pytest.raises(
1046        AssertionError,
1047        match=r"gTest error, id.suite_name.test0 didn't finish",
1048    ):
1049        process_logs(
1050            gtest,
1051            [
1052                SAMPLE_GTEST_START,
1053                SAMPLE_GTEST_FMT.format(
1054                    state=GTEST_START_STATE, suite="suite_name", test="test0"
1055                ),
1056                SAMPLE_GTEST_FMT.format(
1057                    state=GTEST_START_STATE, suite="suite_name", test="test1"
1058                ),
1059            ],
1060        )
1061
1062
1063def test_gtest_mismatch_result(gtest):
1064    with pytest.raises(
1065        AssertionError,
1066        match=r"gTest error, mismatched tests. Expected id.suite_name.test0 but got None",
1067    ):
1068        process_logs(
1069            gtest,
1070            [
1071                SAMPLE_GTEST_START,
1072                SAMPLE_GTEST_FMT.format(
1073                    state=GTEST_START_STATE, suite="suite_name", test="test0"
1074                ),
1075                SAMPLE_GTEST_FMT.format(
1076                    state=GTEST_PASS_STATE, suite="suite_name", test="test1"
1077                ),
1078            ],
1079        )
1080
1081
1082def test_gtest_repeated_result(gtest):
1083    with pytest.raises(
1084        AssertionError,
1085        match=r"gTest error, mismatched tests. Expected id.suite_name.test1 but got id.suite_name.test0",
1086    ):
1087        process_logs(
1088            gtest,
1089            [
1090                SAMPLE_GTEST_START,
1091                SAMPLE_GTEST_FMT.format(
1092                    state=GTEST_START_STATE, suite="suite_name", test="test0"
1093                ),
1094                SAMPLE_GTEST_FMT.format(
1095                    state=GTEST_PASS_STATE, suite="suite_name", test="test0"
1096                ),
1097                SAMPLE_GTEST_FMT.format(
1098                    state=GTEST_START_STATE, suite="suite_name", test="test1"
1099                ),
1100                SAMPLE_GTEST_FMT.format(
1101                    state=GTEST_PASS_STATE, suite="suite_name", test="test0"
1102                ),
1103            ],
1104        )
1105
1106
1107def test_gtest_repeated_run(gtest):
1108    with pytest.raises(
1109        AssertionError,
1110        match=r"gTest error, id.suite_name.test0 running twice",
1111    ):
1112        process_logs(
1113            gtest,
1114            [
1115                SAMPLE_GTEST_START,
1116                SAMPLE_GTEST_FMT.format(
1117                    state=GTEST_START_STATE, suite="suite_name", test="test0"
1118                ),
1119                SAMPLE_GTEST_FMT.format(
1120                    state=GTEST_PASS_STATE, suite="suite_name", test="test0"
1121                ),
1122                SAMPLE_GTEST_FMT.format(
1123                    state=GTEST_START_STATE, suite="suite_name", test="test0"
1124                ),
1125            ],
1126        )
1127
1128
1129def test_bsim_build(monkeypatch, tmp_path):
1130    mocked_instance = mock.Mock()
1131    build_dir = tmp_path / "build_dir"
1132    os.makedirs(build_dir)
1133    mocked_instance.build_dir = str(build_dir)
1134    mocked_instance.name = "platform_name/test/dummy.test"
1135    mocked_instance.testsuite.harness_config = {}
1136
1137    harness = Bsim()
1138    harness.instance = mocked_instance
1139
1140    monkeypatch.setenv("BSIM_OUT_PATH", str(tmp_path))
1141    os.makedirs(os.path.join(tmp_path, "bin"), exist_ok=True)
1142    zephyr_exe_path = os.path.join(build_dir, "zephyr", "zephyr.exe")
1143    os.makedirs(os.path.dirname(zephyr_exe_path), exist_ok=True)
1144    with open(zephyr_exe_path, "w") as file:
1145        file.write("TEST_EXE")
1146
1147    harness.build()
1148
1149    new_exe_path = os.path.join(tmp_path, "bin", "bs_platform_name_test_dummy_test")
1150    assert os.path.exists(new_exe_path)
1151    with open(new_exe_path, "r") as file:
1152        exe_content = file.read()
1153    assert "TEST_EXE" in exe_content
1154