1#!/usr/bin/env python3
2# Copyright (c) 2024 Intel Corporation
3#
4# SPDX-License-Identifier: Apache-2.0
5"""
6Blackbox tests for twister's command line functions changing test output.
7"""
8
9import importlib
10import re
11import mock
12import os
13import pytest
14import sys
15import json
16
17# pylint: disable=no-name-in-module
18from conftest import ZEPHYR_BASE, TEST_DATA, testsuite_filename_mock, clear_log_in_test
19from twisterlib.testplan import TestPlan
20
21
@mock.patch.object(TestPlan, 'TESTSUITE_FILENAME', testsuite_filename_mock)
class TestOutput:
    """Blackbox tests for twister command-line flags that change test output.

    Each test patches ``sys.argv`` with a crafted twister command line,
    executes the real twister entry-point script (loaded once in
    ``setup_class``), and then inspects the exit code (delivered via
    ``SystemExit``) plus either the artifacts twister wrote under the
    ``out_path`` fixture directory (``testplan.json``, ``build.log``,
    ``twister.log``) or the captured stdout/stderr streams.
    """

    # Verbosity / log-level flag combinations driven through
    # test_output_levels; human-readable ids are attached at the
    # @pytest.mark.parametrize site.
    TESTDATA_1 = [
        ([]),
        (['-ll', 'DEBUG']),
        (['-v']),
        (['-v', '-ll', 'DEBUG']),
        (['-vv']),
        (['-vv', '-ll', 'DEBUG']),
    ]

    @classmethod
    def setup_class(cls):
        # Load the twister script under the module name '__main__' so that
        # exec_module() in each test runs it exactly as a CLI invocation
        # would; twister then reports its exit status by raising SystemExit.
        apath = os.path.join(ZEPHYR_BASE, 'scripts', 'twister')
        cls.loader = importlib.machinery.SourceFileLoader('__main__', apath)
        cls.spec = importlib.util.spec_from_loader(cls.loader.name, cls.loader)
        cls.twister_module = importlib.util.module_from_spec(cls.spec)

    @classmethod
    def teardown_class(cls):
        # Nothing to clean up: each test writes only into its own out_path.
        pass

    @pytest.mark.parametrize(
        'flag, expect_paths',
        [
            ('--no-detailed-test-id', False),
            ('--detailed-test-id', True)
        ],
        ids=['no-detailed-test-id', 'detailed-test-id']
    )
    def test_detailed_test_id(self, out_path, flag, expect_paths):
        """Check how --[no-]detailed-test-id shapes names in testplan.json.

        With detailed ids enabled, suite names are expected to start with
        the test-data path (relative to ZEPHYR_BASE) and testcase
        identifiers to contain more than one dot-separated component;
        without it, suite names start with 'dummy.' and identifiers have
        exactly one dot.
        """
        test_platforms = ['qemu_x86', 'intel_adl_crb']
        path = os.path.join(TEST_DATA, 'tests', 'dummy')
        args = ['-i', '--outdir', out_path, '-T', path, '-y'] + \
               [flag] + \
               [val for pair in zip(
                   ['-p'] * len(test_platforms), test_platforms
               ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        # Exit code '0' means the twister run itself succeeded.
        assert str(sys_exit.value) == '0'

        with open(os.path.join(out_path, 'testplan.json')) as f:
            j = json.load(f)
        # Keep only testcases that actually ran: filtered-out testcases
        # carry a 'reason' key and are excluded from the naming checks.
        filtered_j = [
            (ts['platform'], ts['name'], tc['identifier']) \
                for ts in j['testsuites'] \
                for tc in ts['testcases'] if 'reason' not in tc
        ]

        assert len(filtered_j) > 0, "No dummy tests found."

        expected_start = os.path.relpath(TEST_DATA, ZEPHYR_BASE) if expect_paths else 'dummy.'
        assert all([testsuite.startswith(expected_start) for _, testsuite, _ in filtered_j])
        if expect_paths:
            # Path-based identifiers carry extra dot-separated components.
            assert all([(tc_name.count('.') > 1) for _, _, tc_name in filtered_j])
        else:
            assert all([(tc_name.count('.') == 1) for _, _, tc_name in filtered_j])


    def test_inline_logs(self, out_path):
        """Verify --inline-logs copies build-failure logs into twister.log.

        Runs twister twice against a suite that always fails to build
        (exit code '1' expected both times): first without --inline-logs
        to capture the standalone build.log, then with the flag, and
        asserts that every (normalized) build.log line appears inside
        twister.log.
        """
        test_platforms = ['qemu_x86', 'intel_adl_crb']
        path = os.path.join(TEST_DATA, 'tests', 'always_build_error', 'dummy')
        args = ['--outdir', out_path, '-T', path] + \
               [val for pair in zip(
                   ['-p'] * len(test_platforms), test_platforms
               ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        # The build is expected to fail, so twister exits with '1'.
        assert str(sys_exit.value) == '1'

        # Reference copy of the failing build's standalone log.
        # NOTE(review): the 'qemu_x86_atom' path component looks like the
        # resolved build target for qemu_x86 — confirm against the platform
        # definitions if this ever breaks.
        rel_path = os.path.relpath(path, ZEPHYR_BASE)
        build_path = os.path.join(out_path, 'qemu_x86_atom', rel_path, 'always_fail.dummy', 'build.log')
        with open(build_path) as f:
            build_log = f.read()

        # Reset twister.log between the two runs so the second run's log
        # can be compared in isolation.
        clear_log_in_test()

        args = ['--outdir', out_path, '-T', path] + \
               ['--inline-logs'] + \
               [val for pair in zip(
                   ['-p'] * len(test_platforms), test_platforms
               ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        assert str(sys_exit.value) == '1'

        with open(os.path.join(out_path, 'twister.log')) as f:
            inline_twister_log = f.read()

        # Remove information that differs between the runs
        removal_patterns = [
            # Remove tmp filepaths, as they will differ
            r'(/|\\)tmp(/|\\)\S+',
            # Remove object creation order, as it can change
            r'^\[[0-9]+/[0-9]+\] ',
            # Remove variable CMake flag
            r'-DTC_RUNID=[0-9a-zA-Z]+',
            # Remove variable order CMake flags
            r'-I[0-9a-zA-Z/\\]+',
            # Remove duration-sensitive entries
            r'-- Configuring done \([0-9.]+s\)',
            r'-- Generating done \([0-9.]+s\)',
            # Cache location may vary between CI runs
            r'^.*-- Cache files will be written to:.*$'
        ]
        for pattern in removal_patterns:
            c_pattern = re.compile(pattern, flags=re.MULTILINE)
            inline_twister_log = re.sub(c_pattern, '', inline_twister_log)
            build_log = re.sub(c_pattern, '', build_log)

        # Every normalized build.log line must have been inlined.
        split_build_log = build_log.split('\n')
        for r in split_build_log:
            assert r in inline_twister_log

    def _get_matches(self, err, regex_line):
        """Find a line in ``err`` matching a column-wise regex template.

        Scans ``err`` line by line for a line of exactly 8
        whitespace-separated columns where every column fully matches the
        corresponding pattern in ``regex_line``. Returns the list of 8
        ``re.Match`` objects for the first such line, or ``[]`` if none
        matches (partial per-line matches are discarded).
        """
        matches = []
        for line in err.split('\n'):
            columns = line.split()
            if len(columns) == 8:
                for i in range(8):
                    match = re.fullmatch(regex_line[i], columns[i])
                    if match:
                        matches.append(match)
                if len(matches) == 8:
                    return matches
                else:
                    # Some column failed to match — reset and keep scanning.
                    matches = []
        return matches


    @pytest.mark.parametrize(
        'flags',
        TESTDATA_1,
        ids=['not verbose', 'not verbose + debug', 'v', 'v + debug', 'vv', 'vv + debug']
    )
    def test_output_levels(self, capfd, out_path, flags):
        """Check which output appears at each verbosity / log level.

        Asserted behavior: DEBUG lines on stderr appear only with
        '-ll DEBUG'; the 'Total test suites:' summary appears only with
        '-vv'; per-instance INFO result lines appear only with '-v' or
        '-vv'.
        """
        test_path = os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic')
        args = ['--outdir', out_path, '-T', test_path, *flags]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
            pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        # Re-emit the captured streams so they remain visible in the
        # pytest report even though capfd consumed them.
        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        assert str(sys_exit.value) == '0'

        # DEBUG-level log lines must only show up when requested.
        regex_debug_line = r'^\s*DEBUG'
        debug_matches = re.search(regex_debug_line, err, re.MULTILINE)
        if '-ll' in flags and 'DEBUG' in flags:
            assert debug_matches is not None
        else:
            assert debug_matches is None

        # Summary requires verbosity > 1
        if '-vv' in flags:
            assert 'Total test suites: ' in out
        else:
            assert 'Total test suites: ' not in out

        # Brief summary shows up only on verbosity 0 - instance-by-instance otherwise
        regex_info_line = [r'INFO', r'-', r'\d+/\d+', r'\S+', r'\S+', r'[A-Z]+', r'\(\w+', r'[\d.]+s\)']
        info_matches = self._get_matches(err, regex_info_line)
        if not any(f in flags for f in ['-v', '-vv']):
            assert not info_matches
        else:
            assert info_matches