1#!/usr/bin/env python3
2
3# Copyright (c) 2018,2020 Intel Corporation
4# Copyright (c) 2022 Nordic Semiconductor ASA
5# SPDX-License-Identifier: Apache-2.0
6
7import argparse
8import collections
9from email.utils import parseaddr
10from itertools import takewhile
11import json
12import logging
13import os
14from pathlib import Path
15import platform
16import re
17import subprocess
18import sys
19import tempfile
20import traceback
21import shlex
22import shutil
23import textwrap
24import unidiff
25
26from yamllint import config, linter
27
28from junitparser import TestCase, TestSuite, JUnitXml, Skipped, Error, Failure
29import magic
30
31from west.manifest import Manifest
32from west.manifest import ManifestProject
33
34sys.path.insert(0, str(Path(__file__).resolve().parents[1]))
35from get_maintainer import Maintainers, MaintainersError
36import list_boards
37import list_hardware
38
39logger = None
40
41def git(*args, cwd=None, ignore_non_zero=False):
42    # Helper for running a Git command. Returns the rstrip()ed stdout output.
43    # Called like git("diff"). Exits with SystemError (raised by sys.exit()) on
44    # errors if 'ignore_non_zero' is set to False (default: False). 'cwd' is the
45    # working directory to use (default: current directory).
46
47    git_cmd = ("git",) + args
48    try:
49        cp = subprocess.run(git_cmd, capture_output=True, cwd=cwd)
50    except OSError as e:
51        err(f"failed to run '{cmd2str(git_cmd)}': {e}")
52
53    if not ignore_non_zero and (cp.returncode or cp.stderr):
54        err(f"'{cmd2str(git_cmd)}' exited with status {cp.returncode} and/or "
55            f"wrote to stderr.\n"
56            f"==stdout==\n"
57            f"{cp.stdout.decode('utf-8')}\n"
58            f"==stderr==\n"
59            f"{cp.stderr.decode('utf-8')}\n")
60
61    return cp.stdout.decode("utf-8").rstrip()
62
63def get_shas(refspec):
64    """
65    Returns the list of Git SHAs for 'refspec'.
66
67    :param refspec:
68    :return:
69    """
70    return git('rev-list',
71               f'--max-count={-1 if "." in refspec else 1}', refspec).split()
72
73def get_files(filter=None, paths=None):
74    filter_arg = (f'--diff-filter={filter}',) if filter else ()
75    paths_arg = ('--', *paths) if paths else ()
76    out = git('diff', '--name-only', *filter_arg, COMMIT_RANGE, *paths_arg)
77    files = out.splitlines()
78    for file in list(files):
79        if not os.path.isfile(os.path.join(GIT_TOP, file)):
80            # Drop submodule directories from the list.
81            files.remove(file)
82    return files
83
84class FmtdFailure(Failure):
85    def __init__(
86        self, severity, title, file, line=None, col=None, desc="", end_line=None, end_col=None
87    ):
88        self.severity = severity
89        self.title = title
90        self.file = file
91        self.line = line
92        self.col = col
93        self.end_line = end_line
94        self.end_col = end_col
95        self.desc = desc
96        description = f':{desc}' if desc else ''
97        msg_body = desc or title
98
99        txt = f'\n{title}{description}\nFile:{file}' + \
100              (f'\nLine:{line}' if line else '') + \
101              (f'\nColumn:{col}' if col else '') + \
102              (f'\nEndLine:{end_line}' if end_line else '') + \
103              (f'\nEndColumn:{end_col}' if end_col else '')
104        msg = f'{file}' + (f':{line}' if line else '') + f' {msg_body}'
105        typ = severity.lower()
106
107        super().__init__(msg, typ)
108
109        self.text = txt
110
111
112class ComplianceTest:
113    """
114    Base class for tests. Inheriting classes should have a run() method and set
115    these class variables:
116
117    name:
118      Test name
119
120    doc:
121      Link to documentation related to what's being tested
122
123    path_hint:
124      The path the test runs itself in. This is just informative and used in
125      the message that gets printed when running the test.
126
127      There are two magic strings that can be used instead of a path:
128      - The magic string "<zephyr-base>" can be used to refer to the
129      environment variable ZEPHYR_BASE or, when missing, the calculated base of
130      the zephyr tree
131      - The magic string "<git-top>" refers to the top-level repository
132      directory. This avoids running 'git' to find the top-level directory
133      before main() runs (class variable assignments run when the 'class ...'
134      statement runs). That avoids swallowing errors, because main() reports
135      them to GitHub
136    """
137    def __init__(self):
138        self.case = TestCase(type(self).name, "Guidelines")
139        # This is necessary because Failure can be subclassed, but since it is
140        # always restored form the element tree, the subclass is lost upon
141        # restoring
142        self.fmtd_failures = []
143
144    def _result(self, res, text):
145        res.text = text.rstrip()
146        self.case.result += [res]
147
148    def error(self, text, msg=None, type_="error"):
149        """
150        Signals a problem with running the test, with message 'msg'.
151
152        Raises an exception internally, so you do not need to put a 'return'
153        after error().
154        """
155        err = Error(msg or f'{type(self).name} error', type_)
156        self._result(err, text)
157
158        raise EndTest
159
160    def skip(self, text, msg=None, type_="skip"):
161        """
162        Signals that the test should be skipped, with message 'msg'.
163
164        Raises an exception internally, so you do not need to put a 'return'
165        after skip().
166        """
167        skpd = Skipped(msg or f'{type(self).name} skipped', type_)
168        self._result(skpd, text)
169
170        raise EndTest
171
172    def failure(self, text, msg=None, type_="failure"):
173        """
174        Signals that the test failed, with message 'msg'. Can be called many
175        times within the same test to report multiple failures.
176        """
177        fail = Failure(msg or f'{type(self).name} issues', type_)
178        self._result(fail, text)
179
180    def fmtd_failure(
181        self, severity, title, file, line=None, col=None, desc="", end_line=None, end_col=None
182    ):
183        """
184        Signals that the test failed, and store the information in a formatted
185        standardized manner. Can be called many times within the same test to
186        report multiple failures.
187        """
188        fail = FmtdFailure(severity, title, file, line, col, desc, end_line, end_col)
189        self._result(fail, fail.text)
190        self.fmtd_failures.append(fail)
191
192class EndTest(Exception):
193    """
194    Raised by ComplianceTest.error()/skip() to end the test.
195
196    Tests can raise EndTest themselves to immediately end the test, e.g. from
197    within a nested function call.
198    """
199
200
201class CheckPatch(ComplianceTest):
202    """
203    Runs checkpatch and reports found issues
204
205    """
206    name = "Checkpatch"
207    doc = "See https://docs.zephyrproject.org/latest/contribute/guidelines.html#coding-style for more details."
208    path_hint = "<git-top>"
209
210    def run(self):
211        checkpatch = os.path.join(ZEPHYR_BASE, 'scripts', 'checkpatch.pl')
212        if not os.path.exists(checkpatch):
213            self.skip(f'{checkpatch} not found')
214
215        diff = subprocess.Popen(('git', 'diff', '--no-ext-diff', COMMIT_RANGE),
216                                stdout=subprocess.PIPE,
217                                cwd=GIT_TOP)
218        try:
219            subprocess.run((checkpatch, '--mailback', '--no-tree', '-'),
220                           check=True,
221                           stdin=diff.stdout,
222                           stdout=subprocess.PIPE,
223                           stderr=subprocess.STDOUT,
224                           shell=True, cwd=GIT_TOP)
225
226        except subprocess.CalledProcessError as ex:
227            output = ex.output.decode("utf-8")
228            regex = r'^\s*\S+:(\d+):\s*(ERROR|WARNING):(.+?):(.+)(?:\n|\r\n?)+' \
229                    r'^\s*#(\d+):\s*FILE:\s*(.+):(\d+):'
230
231            matches = re.findall(regex, output, re.MULTILINE)
232
233            # add a guard here for excessive number of errors, do not try and
234            # process each one of them and instead push this as one failure.
235            if len(matches) > 500:
236                self.failure(output)
237                return
238
239            for m in matches:
240                self.fmtd_failure(m[1].lower(), m[2], m[5], m[6], col=None,
241                        desc=m[3])
242
243            # If the regex has not matched add the whole output as a failure
244            if len(matches) == 0:
245                self.failure(output)
246
247
248class BoardYmlCheck(ComplianceTest):
249    """
250    Check the board.yml files
251    """
252    name = "BoardYml"
253    doc = "Check the board.yml file format"
254    path_hint = "<zephyr-base>"
255
256    def check_board_file(self, file, vendor_prefixes):
257        """Validate a single board file."""
258        with open(file) as fp:
259            for line_num, line in enumerate(fp.readlines(), start=1):
260                if "vendor:" in line:
261                    _, vnd = line.strip().split(":", 2)
262                    vnd = vnd.strip()
263                    if vnd not in vendor_prefixes:
264                        desc = f"invalid vendor: {vnd}"
265                        self.fmtd_failure("error", "BoardYml", file, line_num,
266                                          desc=desc)
267
268    def run(self):
269        vendor_prefixes = ["others"]
270        with open(os.path.join(ZEPHYR_BASE, "dts", "bindings", "vendor-prefixes.txt")) as fp:
271            for line in fp.readlines():
272                line = line.strip()
273                if not line or line.startswith("#"):
274                    continue
275                try:
276                    vendor, _ = line.split("\t", 2)
277                    vendor_prefixes.append(vendor)
278                except ValueError:
279                    self.error(f"Invalid line in vendor-prefixes.txt:\"{line}\".")
280                    self.error("Did you forget the tab character?")
281
282        path = Path(ZEPHYR_BASE)
283        for file in path.glob("**/board.yml"):
284            self.check_board_file(file, vendor_prefixes)
285
286
287class ClangFormatCheck(ComplianceTest):
288    """
289    Check if clang-format reports any issues
290    """
291    name = "ClangFormat"
292    doc = "See https://docs.zephyrproject.org/latest/contribute/guidelines.html#clang-format for more details."
293    path_hint = "<git-top>"
294
295    def run(self):
296        exe = f"clang-format-diff.{'exe' if platform.system() == 'Windows' else 'py'}"
297
298        for file in get_files():
299            if Path(file).suffix not in ['.c', '.h']:
300                continue
301
302            diff = subprocess.Popen(('git', 'diff', '-U0', '--no-color', COMMIT_RANGE, '--', file),
303                                    stdout=subprocess.PIPE,
304                                    cwd=GIT_TOP)
305            try:
306                subprocess.run((exe, '-p1'),
307                               check=True,
308                               stdin=diff.stdout,
309                               stdout=subprocess.PIPE,
310                               stderr=subprocess.STDOUT,
311                               cwd=GIT_TOP)
312
313            except subprocess.CalledProcessError as ex:
314                patchset = unidiff.PatchSet.from_string(ex.output, encoding="utf-8")
315                for patch in patchset:
316                    for hunk in patch:
317                        # Strip the before and after context
318                        before = next(i for i,v in enumerate(hunk) if str(v).startswith(('-', '+')))
319                        after = next(i for i,v in enumerate(reversed(hunk)) if str(v).startswith(('-', '+')))
320                        msg = "".join([str(l) for l in hunk[before:-after or None]])
321
322                        # show the hunk at the last line
323                        self.fmtd_failure("notice",
324                                          "You may want to run clang-format on this change",
325                                          file, line=hunk.source_start + hunk.source_length - after,
326                                          desc=f'\r\n{msg}')
327
328
329class DevicetreeBindingsCheck(ComplianceTest):
330    """
331    Checks if we are introducing any unwanted properties in Devicetree Bindings.
332    """
333    name = "DevicetreeBindings"
334    doc = "See https://docs.zephyrproject.org/latest/build/dts/bindings.html for more details."
335    path_hint = "<zephyr-base>"
336
337    def run(self, full=True):
338        dts_bindings = self.parse_dt_bindings()
339
340        for dts_binding in dts_bindings:
341            self.required_false_check(dts_binding)
342
343    def parse_dt_bindings(self):
344        """
345        Returns a list of dts/bindings/**/*.yaml files
346        """
347
348        dt_bindings = []
349        for file_name in get_files(filter="d"):
350            if 'dts/bindings/' in file_name and file_name.endswith('.yaml'):
351                dt_bindings.append(file_name)
352
353        return dt_bindings
354
355    def required_false_check(self, dts_binding):
356        with open(dts_binding) as file:
357            for line_number, line in enumerate(file, 1):
358                if 'required: false' in line:
359                    self.fmtd_failure(
360                        'warning', 'Devicetree Bindings', dts_binding,
361                        line_number, col=None,
362                        desc="'required: false' is redundant, please remove")
363
364
365class KconfigCheck(ComplianceTest):
366    """
367    Checks is we are introducing any new warnings/errors with Kconfig,
368    for example using undefined Kconfig variables.
369    """
370    name = "Kconfig"
371    doc = "See https://docs.zephyrproject.org/latest/build/kconfig/tips.html for more details."
372    path_hint = "<zephyr-base>"
373
374    def run(self, full=True, no_modules=False, filename="Kconfig", hwm=None):
375        self.no_modules = no_modules
376
377        kconf = self.parse_kconfig(filename=filename, hwm=hwm)
378
379        self.check_top_menu_not_too_long(kconf)
380        self.check_no_pointless_menuconfigs(kconf)
381        self.check_no_undef_within_kconfig(kconf)
382        self.check_no_redefined_in_defconfig(kconf)
383        self.check_no_enable_in_boolean_prompt(kconf)
384        self.check_soc_name_sync(kconf)
385        if full:
386            self.check_no_undef_outside_kconfig(kconf)
387
388    def get_modules(self, modules_file, settings_file):
389        """
390        Get a list of modules and put them in a file that is parsed by
391        Kconfig
392
393        This is needed to complete Kconfig sanity tests.
394
395        """
396        if self.no_modules:
397            with open(modules_file, 'w') as fp_module_file:
398                fp_module_file.write("# Empty\n")
399            return
400
401        # Invoke the script directly using the Python executable since this is
402        # not a module nor a pip-installed Python utility
403        zephyr_module_path = os.path.join(ZEPHYR_BASE, "scripts",
404                                          "zephyr_module.py")
405        cmd = [sys.executable, zephyr_module_path,
406               '--kconfig-out', modules_file, '--settings-out', settings_file]
407        try:
408            subprocess.run(cmd, check=True, stdout=subprocess.PIPE,
409                           stderr=subprocess.STDOUT)
410        except subprocess.CalledProcessError as ex:
411            self.error(ex.output.decode("utf-8"))
412
413        modules_dir = ZEPHYR_BASE + '/modules'
414        modules = [name for name in os.listdir(modules_dir) if
415                   os.path.exists(os.path.join(modules_dir, name, 'Kconfig'))]
416
417        with open(modules_file, 'r') as fp_module_file:
418            content = fp_module_file.read()
419
420        with open(modules_file, 'w') as fp_module_file:
421            for module in modules:
422                fp_module_file.write("ZEPHYR_{}_KCONFIG = {}\n".format(
423                    re.sub('[^a-zA-Z0-9]', '_', module).upper(),
424                    modules_dir + '/' + module + '/Kconfig'
425                ))
426            fp_module_file.write(content)
427
428    def get_module_setting_root(self, root, settings_file):
429        """
430        Parse the Zephyr module generated settings file given by 'settings_file'
431        and return all root settings defined by 'root'.
432        """
433        # Invoke the script directly using the Python executable since this is
434        # not a module nor a pip-installed Python utility
435        root_paths = []
436
437        if os.path.exists(settings_file):
438            with open(settings_file, 'r') as fp_setting_file:
439                content = fp_setting_file.read()
440
441            lines = content.strip().split('\n')
442            for line in lines:
443                root = root.upper()
444                if line.startswith(f'"{root}_ROOT":'):
445                    _, root_path = line.split(":", 1)
446                    root_paths.append(Path(root_path.strip('"')))
447        return root_paths
448
449    def get_kconfig_dts(self, kconfig_dts_file, settings_file):
450        """
451        Generate the Kconfig.dts using dts/bindings as the source.
452
453        This is needed to complete Kconfig compliance tests.
454
455        """
456        # Invoke the script directly using the Python executable since this is
457        # not a module nor a pip-installed Python utility
458        zephyr_drv_kconfig_path = os.path.join(ZEPHYR_BASE, "scripts", "dts",
459                                               "gen_driver_kconfig_dts.py")
460        binding_paths = []
461        binding_paths.append(os.path.join(ZEPHYR_BASE, "dts", "bindings"))
462
463        dts_root_paths = self.get_module_setting_root('dts', settings_file)
464        for p in dts_root_paths:
465            binding_paths.append(p / "dts" / "bindings")
466
467        cmd = [sys.executable, zephyr_drv_kconfig_path,
468               '--kconfig-out', kconfig_dts_file, '--bindings-dirs']
469        for binding_path in binding_paths:
470            cmd.append(binding_path)
471        try:
472            subprocess.run(cmd, check=True, stdout=subprocess.PIPE,
473                           stderr=subprocess.STDOUT)
474        except subprocess.CalledProcessError as ex:
475            self.error(ex.output.decode("utf-8"))
476
477    def get_v1_model_syms(self, kconfig_v1_file, kconfig_v1_syms_file):
478        """
479        Generate a symbol define Kconfig file.
480        This function creates a file with all Kconfig symbol definitions from
481        old boards model so that those symbols will not appear as undefined
482        symbols in hardware model v2.
483
484        This is needed to complete Kconfig compliance tests.
485        """
486        os.environ['HWM_SCHEME'] = 'v1'
487        # 'kconfiglib' is global
488        # pylint: disable=undefined-variable
489
490        try:
491            kconf_v1 = kconfiglib.Kconfig(filename=kconfig_v1_file, warn=False)
492        except kconfiglib.KconfigError as e:
493            self.failure(str(e))
494            raise EndTest
495
496        with open(kconfig_v1_syms_file, 'w') as fp_kconfig_v1_syms_file:
497            for s in kconf_v1.defined_syms:
498                if s.type != kconfiglib.UNKNOWN:
499                    fp_kconfig_v1_syms_file.write('config ' + s.name)
500                    fp_kconfig_v1_syms_file.write('\n\t' + kconfiglib.TYPE_TO_STR[s.type])
501                    fp_kconfig_v1_syms_file.write('\n\n')
502
503    def get_v2_model(self, kconfig_dir, settings_file):
504        """
505        Get lists of v2 boards and SoCs and put them in a file that is parsed by
506        Kconfig
507
508        This is needed to complete Kconfig sanity tests.
509        """
510        os.environ['HWM_SCHEME'] = 'v2'
511        kconfig_file = os.path.join(kconfig_dir, 'boards', 'Kconfig')
512        kconfig_boards_file = os.path.join(kconfig_dir, 'boards', 'Kconfig.boards')
513        kconfig_defconfig_file = os.path.join(kconfig_dir, 'boards', 'Kconfig.defconfig')
514
515        board_roots = self.get_module_setting_root('board', settings_file)
516        board_roots.insert(0, Path(ZEPHYR_BASE))
517        soc_roots = self.get_module_setting_root('soc', settings_file)
518        soc_roots.insert(0, Path(ZEPHYR_BASE))
519        root_args = argparse.Namespace(**{'board_roots': board_roots,
520                                          'soc_roots': soc_roots, 'board': None,
521                                          'board_dir': []})
522        v2_boards = list_boards.find_v2_boards(root_args).values()
523
524        with open(kconfig_defconfig_file, 'w') as fp:
525            for board in v2_boards:
526                for board_dir in board.directories:
527                    fp.write('osource "' + (board_dir / 'Kconfig.defconfig').as_posix() + '"\n')
528
529        with open(kconfig_boards_file, 'w') as fp:
530            for board in v2_boards:
531                board_str = 'BOARD_' + re.sub(r"[^a-zA-Z0-9_]", "_", board.name).upper()
532                fp.write('config  ' + board_str + '\n')
533                fp.write('\t bool\n')
534                for qualifier in list_boards.board_v2_qualifiers(board):
535                    board_str = ('BOARD_' + board.name + '_' +
536                                 re.sub(r"[^a-zA-Z0-9_]", "_", qualifier)).upper()
537                    fp.write('config  ' + board_str + '\n')
538                    fp.write('\t bool\n')
539                for board_dir in board.directories:
540                    fp.write(
541                        'source "' + (board_dir / ('Kconfig.' + board.name)).as_posix() + '"\n'
542                    )
543
544        with open(kconfig_file, 'w') as fp:
545            fp.write(
546                'osource "' + (Path(kconfig_dir) / 'boards' / 'Kconfig.syms.v1').as_posix() + '"\n'
547            )
548            for board in v2_boards:
549                for board_dir in board.directories:
550                    fp.write('osource "' + (board_dir / 'Kconfig').as_posix() + '"\n')
551
552        kconfig_defconfig_file = os.path.join(kconfig_dir, 'soc', 'Kconfig.defconfig')
553        kconfig_soc_file = os.path.join(kconfig_dir, 'soc', 'Kconfig.soc')
554        kconfig_file = os.path.join(kconfig_dir, 'soc', 'Kconfig')
555
556        root_args = argparse.Namespace(**{'soc_roots': soc_roots})
557        v2_systems = list_hardware.find_v2_systems(root_args)
558
559        soc_folders = {folder for soc in v2_systems.get_socs() for folder in soc.folder}
560        with open(kconfig_defconfig_file, 'w') as fp:
561            for folder in soc_folders:
562                fp.write('osource "' + (Path(folder) / 'Kconfig.defconfig').as_posix() + '"\n')
563
564        with open(kconfig_soc_file, 'w') as fp:
565            for folder in soc_folders:
566                fp.write('source "' + (Path(folder) / 'Kconfig.soc').as_posix() + '"\n')
567
568        with open(kconfig_file, 'w') as fp:
569            for folder in soc_folders:
570                fp.write('source "' + (Path(folder) / 'Kconfig').as_posix() + '"\n')
571
572        kconfig_file = os.path.join(kconfig_dir, 'arch', 'Kconfig')
573
574        root_args = argparse.Namespace(**{'arch_roots': [Path(ZEPHYR_BASE)], 'arch': None})
575        v2_archs = list_hardware.find_v2_archs(root_args)
576
577        with open(kconfig_file, 'w') as fp:
578            for arch in v2_archs['archs']:
579                fp.write('source "' + (Path(arch['path']) / 'Kconfig').as_posix() + '"\n')
580
581    def parse_kconfig(self, filename="Kconfig", hwm=None):
582        """
583        Returns a kconfiglib.Kconfig object for the Kconfig files. We reuse
584        this object for all tests to avoid having to reparse for each test.
585        """
586        # Put the Kconfiglib path first to make sure no local Kconfiglib version is
587        # used
588        kconfig_path = os.path.join(ZEPHYR_BASE, "scripts", "kconfig")
589        if not os.path.exists(kconfig_path):
590            self.error(kconfig_path + " not found")
591
592        kconfiglib_dir = tempfile.mkdtemp(prefix="kconfiglib_")
593
594        sys.path.insert(0, kconfig_path)
595        # Import globally so that e.g. kconfiglib.Symbol can be referenced in
596        # tests
597        global kconfiglib
598        import kconfiglib
599
600        # Look up Kconfig files relative to ZEPHYR_BASE
601        os.environ["srctree"] = ZEPHYR_BASE
602
603        # Parse the entire Kconfig tree, to make sure we see all symbols
604        os.environ["SOC_DIR"] = "soc/"
605        os.environ["ARCH_DIR"] = "arch/"
606        os.environ["BOARD"] = "boards"
607        os.environ["ARCH"] = "*"
608        os.environ["KCONFIG_BINARY_DIR"] = kconfiglib_dir
609        os.environ['DEVICETREE_CONF'] = "dummy"
610        os.environ['TOOLCHAIN_HAS_NEWLIB'] = "y"
611
612        # Older name for DEVICETREE_CONF, for compatibility with older Zephyr
613        # versions that don't have the renaming
614        os.environ["GENERATED_DTS_BOARD_CONF"] = "dummy"
615
616        # For multi repo support
617        self.get_modules(os.path.join(kconfiglib_dir, "Kconfig.modules"),
618                         os.path.join(kconfiglib_dir, "settings_file.txt"))
619        # For Kconfig.dts support
620        self.get_kconfig_dts(os.path.join(kconfiglib_dir, "Kconfig.dts"),
621                             os.path.join(kconfiglib_dir, "settings_file.txt"))
622
623        # To make compliance work with old hw model and HWMv2 simultaneously.
624        kconfiglib_boards_dir = os.path.join(kconfiglib_dir, 'boards')
625        os.makedirs(kconfiglib_boards_dir, exist_ok=True)
626        os.makedirs(os.path.join(kconfiglib_dir, 'soc'), exist_ok=True)
627        os.makedirs(os.path.join(kconfiglib_dir, 'arch'), exist_ok=True)
628
629        os.environ["KCONFIG_BOARD_DIR"] = kconfiglib_boards_dir
630        self.get_v2_model(kconfiglib_dir, os.path.join(kconfiglib_dir, "settings_file.txt"))
631
632        # Tells Kconfiglib to generate warnings for all references to undefined
633        # symbols within Kconfig files
634        os.environ["KCONFIG_WARN_UNDEF"] = "y"
635
636        try:
637            # Note this will both print warnings to stderr _and_ return
638            # them: so some warnings might get printed
639            # twice. "warn_to_stderr=False" could unfortunately cause
640            # some (other) warnings to never be printed.
641            return kconfiglib.Kconfig(filename=filename)
642        except kconfiglib.KconfigError as e:
643            self.failure(str(e))
644            raise EndTest
645        finally:
646            # Clean up the temporary directory
647            shutil.rmtree(kconfiglib_dir)
648
649    def get_logging_syms(self, kconf):
650        # Returns a set() with the names of the Kconfig symbols generated with
651        # logging template in samples/tests folders. The Kconfig symbols doesn't
652        # include `CONFIG_` and for each module declared there is one symbol
653        # per suffix created.
654
655        suffixes = [
656            "_LOG_LEVEL",
657            "_LOG_LEVEL_DBG",
658            "_LOG_LEVEL_ERR",
659            "_LOG_LEVEL_INF",
660            "_LOG_LEVEL_WRN",
661            "_LOG_LEVEL_OFF",
662            "_LOG_LEVEL_INHERIT",
663            "_LOG_LEVEL_DEFAULT",
664        ]
665
666        # Warning: Needs to work with both --perl-regexp and the 're' module.
667        regex = r"^\s*(?:module\s*=\s*)([A-Z0-9_]+)\s*(?:#|$)"
668
669        # Grep samples/ and tests/ for symbol definitions
670        grep_stdout = git("grep", "-I", "-h", "--perl-regexp", regex, "--",
671                          ":samples", ":tests", cwd=ZEPHYR_BASE)
672
673        names = re.findall(regex, grep_stdout, re.MULTILINE)
674
675        kconf_syms = []
676        for name in names:
677            for suffix in suffixes:
678                kconf_syms.append(f"{name}{suffix}")
679
680        return set(kconf_syms)
681
682    def get_defined_syms(self, kconf):
683        # Returns a set() with the names of all defined Kconfig symbols (with no
684        # 'CONFIG_' prefix). This is complicated by samples and tests defining
685        # their own Kconfig trees. For those, just grep for 'config FOO' to find
686        # definitions. Doing it "properly" with Kconfiglib is still useful for
687        # the main tree, because some symbols are defined using preprocessor
688        # macros.
689
690        # Warning: Needs to work with both --perl-regexp and the 're' module.
691        # (?:...) is a non-capturing group.
692        regex = r"^\s*(?:menu)?config\s*([A-Z0-9_]+)\s*(?:#|$)"
693
694        # Grep samples/ and tests/ for symbol definitions
695        grep_stdout = git("grep", "-I", "-h", "--perl-regexp", regex, "--",
696                          ":samples", ":tests", cwd=ZEPHYR_BASE)
697
698        # Generate combined list of configs and choices from the main Kconfig tree.
699        kconf_syms = kconf.unique_defined_syms + kconf.unique_choices
700
701        # Symbols from the main Kconfig tree + grepped definitions from samples
702        # and tests
703        return set(
704            [sym.name for sym in kconf_syms]
705            + re.findall(regex, grep_stdout, re.MULTILINE)
706        ).union(self.get_logging_syms(kconf))
707
708    def check_top_menu_not_too_long(self, kconf):
709        """
710        Checks that there aren't too many items in the top-level menu (which
711        might be a sign that stuff accidentally got added there)
712        """
713        max_top_items = 50
714
715        n_top_items = 0
716        node = kconf.top_node.list
717        while node:
718            # Only count items with prompts. Other items will never be
719            # shown in the menuconfig (outside show-all mode).
720            if node.prompt:
721                n_top_items += 1
722            node = node.next
723
724        if n_top_items > max_top_items:
725            self.failure(f"""
726Expected no more than {max_top_items} potentially visible items (items with
727prompts) in the top-level Kconfig menu, found {n_top_items} items. If you're
728deliberately adding new entries, then bump the 'max_top_items' variable in
729{__file__}.""")
730
731    def check_no_redefined_in_defconfig(self, kconf):
732        # Checks that no symbols are (re)defined in defconfigs.
733
734        for node in kconf.node_iter():
735            # 'kconfiglib' is global
736            # pylint: disable=undefined-variable
737            if "defconfig" in node.filename and (node.prompt or node.help):
738                name = (node.item.name if node.item not in
739                        (kconfiglib.MENU, kconfiglib.COMMENT) else str(node))
740                self.failure(f"""
741Kconfig node '{name}' found with prompt or help in {node.filename}.
742Options must not be defined in defconfig files.
743""")
744                continue
745
746    def check_no_enable_in_boolean_prompt(self, kconf):
747        # Checks that boolean's prompt does not start with "Enable...".
748
749        for node in kconf.node_iter():
750            # skip Kconfig nodes not in-tree (will present an absolute path)
751            if os.path.isabs(node.filename):
752                continue
753
754            # 'kconfiglib' is global
755            # pylint: disable=undefined-variable
756
757            # only process boolean symbols with a prompt
758            if (not isinstance(node.item, kconfiglib.Symbol) or
759                node.item.type != kconfiglib.BOOL or
760                not node.prompt or
761                not node.prompt[0]):
762                continue
763
764            if re.match(r"^[Ee]nable.*", node.prompt[0]):
765                self.failure(f"""
766Boolean option '{node.item.name}' prompt must not start with 'Enable...'. Please
767check Kconfig guidelines.
768""")
769                continue
770
771    def check_no_pointless_menuconfigs(self, kconf):
772        # Checks that there are no pointless 'menuconfig' symbols without
773        # children in the Kconfig files
774
775        bad_mconfs = []
776        for node in kconf.node_iter():
777            # 'kconfiglib' is global
778            # pylint: disable=undefined-variable
779
780            # Avoid flagging empty regular menus and choices, in case people do
781            # something with 'osource' (could happen for 'menuconfig' symbols
782            # too, though it's less likely)
783            if node.is_menuconfig and not node.list and \
784               isinstance(node.item, kconfiglib.Symbol):
785
786                bad_mconfs.append(node)
787
788        if bad_mconfs:
789            self.failure("""\
790Found pointless 'menuconfig' symbols without children. Use regular 'config'
791symbols instead. See
792https://docs.zephyrproject.org/latest/build/kconfig/tips.html#menuconfig-symbols.
793
794""" + "\n".join(f"{node.item.name:35} {node.filename}:{node.linenr}"
795                for node in bad_mconfs))
796
797    def check_no_undef_within_kconfig(self, kconf):
798        """
799        Checks that there are no references to undefined Kconfig symbols within
800        the Kconfig files
801        """
802        undef_ref_warnings = "\n\n\n".join(warning for warning in kconf.warnings
803                                           if "undefined symbol" in warning)
804
805        if undef_ref_warnings:
806            self.failure(f"Undefined Kconfig symbols:\n\n {undef_ref_warnings}")
807
808    def check_soc_name_sync(self, kconf):
809        root_args = argparse.Namespace(**{'soc_roots': [Path(ZEPHYR_BASE)]})
810        v2_systems = list_hardware.find_v2_systems(root_args)
811
812        soc_names = {soc.name for soc in v2_systems.get_socs()}
813
814        soc_kconfig_names = set()
815        for node in kconf.node_iter():
816            # 'kconfiglib' is global
817            # pylint: disable=undefined-variable
818            if isinstance(node.item, kconfiglib.Symbol) and node.item.name == "SOC":
819                n = node.item
820                for d in n.defaults:
821                    soc_kconfig_names.add(d[0].name)
822
823        soc_name_warnings = []
824        for name in soc_names:
825            if name not in soc_kconfig_names:
826                soc_name_warnings.append(f"soc name: {name} not found in CONFIG_SOC defaults.")
827
828        if soc_name_warnings:
829            soc_name_warning_str = '\n'.join(soc_name_warnings)
830            self.failure(f'''
831Missing SoC names or CONFIG_SOC vs soc.yml out of sync:
832
833{soc_name_warning_str}
834''')
835
836    def check_no_undef_outside_kconfig(self, kconf):
837        """
838        Checks that there are no references to undefined Kconfig symbols
839        outside Kconfig files (any CONFIG_FOO where no FOO symbol exists)
840        """
841        # Grep for symbol references.
842        #
843        # Example output line for a reference to CONFIG_FOO at line 17 of
844        # foo/bar.c:
845        #
846        #   foo/bar.c<null>17<null>#ifdef CONFIG_FOO
847        #
848        # 'git grep --only-matching' would get rid of the surrounding context
849        # ('#ifdef '), but it was added fairly recently (second half of 2018),
850        # so we extract the references from each line ourselves instead.
851        #
852        # The regex uses word boundaries (\b) to isolate the reference, and
853        # negative lookahead to automatically whitelist the following:
854        #
855        #  - ##, for token pasting (CONFIG_FOO_##X)
856        #
857        #  - $, e.g. for CMake variable expansion (CONFIG_FOO_${VAR})
858        #
859        #  - @, e.g. for CMakes's configure_file() (CONFIG_FOO_@VAR@)
860        #
861        #  - {, e.g. for Python scripts ("CONFIG_FOO_{}_BAR".format(...)")
862        #
863        #  - *, meant for comments like '#endif /* CONFIG_FOO_* */
864
865        defined_syms = self.get_defined_syms(kconf)
866
867        # Maps each undefined symbol to a list <filename>:<linenr> strings
868        undef_to_locs = collections.defaultdict(list)
869
870        # Warning: Needs to work with both --perl-regexp and the 're' module
871        regex = r"\bCONFIG_[A-Z0-9_]+\b(?!\s*##|[$@{(.*])"
872
873        # Skip doc/releases and doc/security/vulnerabilities.rst, which often
874        # reference removed symbols
875        grep_stdout = git("grep", "--line-number", "-I", "--null",
876                          "--perl-regexp", regex, "--", ":!/doc/releases",
877                          ":!/doc/security/vulnerabilities.rst",
878                          cwd=Path(GIT_TOP))
879
880        # splitlines() supports various line terminators
881        for grep_line in grep_stdout.splitlines():
882            path, lineno, line = grep_line.split("\0")
883
884            # Extract symbol references (might be more than one) within the
885            # line
886            for sym_name in re.findall(regex, line):
887                sym_name = sym_name[7:]  # Strip CONFIG_
888                if sym_name not in defined_syms and \
889                   sym_name not in self.UNDEF_KCONFIG_ALLOWLIST and \
890                   not (sym_name.endswith("_MODULE") and sym_name[:-7] in defined_syms):
891
892                    undef_to_locs[sym_name].append(f"{path}:{lineno}")
893
894        if not undef_to_locs:
895            return
896
897        # String that describes all referenced but undefined Kconfig symbols,
898        # in alphabetical order, along with the locations where they're
899        # referenced. Example:
900        #
901        #   CONFIG_ALSO_MISSING    arch/xtensa/core/fatal.c:273
902        #   CONFIG_MISSING         arch/xtensa/core/fatal.c:264, subsys/fb/cfb.c:20
903        undef_desc = "\n".join(f"CONFIG_{sym_name:35} {', '.join(locs)}"
904            for sym_name, locs in sorted(undef_to_locs.items()))
905
906        self.failure(f"""
907Found references to undefined Kconfig symbols. If any of these are false
908positives, then add them to UNDEF_KCONFIG_ALLOWLIST in {__file__}.
909
910If the reference is for a comment like /* CONFIG_FOO_* */ (or
911/* CONFIG_FOO_*_... */), then please use exactly that form (with the '*'). The
912CI check knows not to flag it.
913
914More generally, a reference followed by $, @, {{, (, ., *, or ## will never be
915flagged.
916
917{undef_desc}""")
918
919    # Many of these are symbols used as examples. Note that the list is sorted
920    # alphabetically, and skips the CONFIG_ prefix.
921    UNDEF_KCONFIG_ALLOWLIST = {
922        # zephyr-keep-sorted-start re(^\s+")
923        "ALSO_MISSING",
924        "APP_LINK_WITH_",
925        "APP_LOG_LEVEL", # Application log level is not detected correctly as
926                         # the option is defined using a template, so it can't
927                         # be grepped
928        "APP_LOG_LEVEL_DBG",
929        "ARMCLANG_STD_LIBC",  # The ARMCLANG_STD_LIBC is defined in the
930                              # toolchain Kconfig which is sourced based on
931                              # Zephyr toolchain variant and therefore not
932                              # visible to compliance.
933        "BINDESC_", # Used in documentation as a prefix
934        "BOARD_", # Used as regex in scripts/utils/board_v1_to_v2.py
935        "BOARD_MPS2_AN521_CPUTEST", # Used for board and SoC extension feature tests
936        "BOARD_NATIVE_SIM_NATIVE_64_TWO", # Used for board and SoC extension feature tests
937        "BOARD_NATIVE_SIM_NATIVE_ONE", # Used for board and SoC extension feature tests
938        "BOOT_DIRECT_XIP", # Used in sysbuild for MCUboot configuration
939        "BOOT_DIRECT_XIP_REVERT", # Used in sysbuild for MCUboot configuration
940        "BOOT_ENCRYPTION_KEY_FILE", # Used in sysbuild
941        "BOOT_ENCRYPT_IMAGE", # Used in sysbuild
942        "BOOT_FIRMWARE_LOADER", # Used in sysbuild for MCUboot configuration
943        "BOOT_MAX_IMG_SECTORS_AUTO", # Used in sysbuild
944        "BOOT_RAM_LOAD", # Used in sysbuild for MCUboot configuration
945        "BOOT_SERIAL_BOOT_MODE",     # Used in (sysbuild-based) test/
946                                     # documentation
947        "BOOT_SERIAL_CDC_ACM",       # Used in (sysbuild-based) test
948        "BOOT_SERIAL_ENTRANCE_GPIO", # Used in (sysbuild-based) test
949        "BOOT_SERIAL_IMG_GRP_HASH",  # Used in documentation
950        "BOOT_SHARE_BACKEND_RETENTION", # Used in Kconfig text
951        "BOOT_SHARE_DATA",           # Used in Kconfig text
952        "BOOT_SHARE_DATA_BOOTINFO", # Used in (sysbuild-based) test
953        "BOOT_SIGNATURE_KEY_FILE",   # MCUboot setting used by sysbuild
954        "BOOT_SIGNATURE_TYPE_ECDSA_P256", # MCUboot setting used by sysbuild
955        "BOOT_SIGNATURE_TYPE_ED25519",    # MCUboot setting used by sysbuild
956        "BOOT_SIGNATURE_TYPE_NONE",       # MCUboot setting used by sysbuild
957        "BOOT_SIGNATURE_TYPE_RSA",        # MCUboot setting used by sysbuild
958        "BOOT_SWAP_USING_MOVE", # Used in sysbuild for MCUboot configuration
959        "BOOT_SWAP_USING_SCRATCH", # Used in sysbuild for MCUboot configuration
960        "BOOT_UPGRADE_ONLY", # Used in example adjusting MCUboot config, but
961                             # symbol is defined in MCUboot itself.
962        "BOOT_VALIDATE_SLOT0",       # Used in (sysbuild-based) test
963        "BOOT_WATCHDOG_FEED",        # Used in (sysbuild-based) test
964        "BT_6LOWPAN",  # Defined in Linux, mentioned in docs
965        "CDC_ACM_PORT_NAME_",
966        "CHRE",  # Optional module
967        "CHRE_LOG_LEVEL_DBG",  # Optional module
968        "CLOCK_STM32_SYSCLK_SRC_",
969        "CMD_CACHE",  # Defined in U-Boot, mentioned in docs
970        "CMU",
971        "COMPILER_RT_RTLIB",
972        "CRC",  # Used in TI CC13x2 / CC26x2 SDK comment
973        "DEEP_SLEEP",  # #defined by RV32M1 in ext/
974        "DESCRIPTION",
975        "ERR",
976        "ESP_DIF_LIBRARY",  # Referenced in CMake comment
977        "EXPERIMENTAL",
978        "EXTRA_FIRMWARE_DIR", # Linux, in boards/xtensa/intel_adsp_cavs25/doc
979        "FFT",  # Used as an example in cmake/extensions.cmake
980        "FLAG",  # Used as an example
981        "FOO",
982        "FOO_LOG_LEVEL",
983        "FOO_SETTING_1",
984        "FOO_SETTING_2",
985        "HEAP_MEM_POOL_ADD_SIZE_", # Used as an option matching prefix
986        "HUGETLBFS",          # Linux, in boards/xtensa/intel_adsp_cavs25/doc
987        "LIBGCC_RTLIB",
988        "LLVM_USE_LD",   # Both LLVM_USE_* are in cmake/toolchain/llvm/Kconfig
989        "LLVM_USE_LLD",  # which are only included if LLVM is selected but
990                         # not other toolchains. Compliance check would complain,
991                         # for example, if you are using GCC.
992        "LOG_BACKEND_MOCK_OUTPUT_DEFAULT", #Referenced in tests/subsys/logging/log_syst
993        "LOG_BACKEND_MOCK_OUTPUT_SYST", #Referenced in testcase.yaml of log_syst test
994        "LSM6DSO_INT_PIN",
995        "MCUBOOT_ACTION_HOOKS",     # Used in (sysbuild-based) test
996        "MCUBOOT_CLEANUP_ARM_CORE", # Used in (sysbuild-based) test
997        "MCUBOOT_DOWNGRADE_PREVENTION", # but symbols are defined in MCUboot
998                                        # itself.
999        "MCUBOOT_LOG_LEVEL_INF",
1000        "MCUBOOT_LOG_LEVEL_WRN",        # Used in example adjusting MCUboot
1001                                        # config,
1002        "MCUBOOT_SERIAL",           # Used in (sysbuild-based) test/
1003                                    # documentation
1004        "MCUMGR_GRP_EXAMPLE_OTHER_HOOK", # Used in documentation
1005        "MISSING",
1006        "MODULES",
1007        "MODVERSIONS",        # Linux, in boards/xtensa/intel_adsp_cavs25/doc
1008        "MYFEATURE",
1009        "MY_DRIVER_0",
1010        "NORMAL_SLEEP",  # #defined by RV32M1 in ext/
1011        "NRF_WIFI_FW_BIN", # Directly passed from CMakeLists.txt
1012        "OPT",
1013        "OPT_0",
1014        "PEDO_THS_MIN",
1015        "PSA_H", # This is used in config-psa.h as guard for the header file
1016        "REG1",
1017        "REG2",
1018        "RIMAGE_SIGNING_SCHEMA",  # Optional module
1019        "SECURITY_LOADPIN",   # Linux, in boards/xtensa/intel_adsp_cavs25/doc
1020        "SEL",
1021        "SHIFT",
1022        "SINGLE_APPLICATION_SLOT", # Used in sysbuild for MCUboot configuration
1023        "SOC_SERIES_", # Used as regex in scripts/utils/board_v1_to_v2.py
1024        "SOC_WATCH",  # Issue 13749
1025        "SOME_BOOL",
1026        "SOME_INT",
1027        "SOME_OTHER_BOOL",
1028        "SOME_STRING",
1029        "SRAM2",  # Referenced in a comment in samples/application_development
1030        "STACK_SIZE",  # Used as an example in the Kconfig docs
1031        "STD_CPP",  # Referenced in CMake comment
1032        "TEST1",
1033        "TOOLCHAIN_ARCMWDT_SUPPORTS_THREAD_LOCAL_STORAGE", # The symbol is defined in the toolchain
1034                                                    # Kconfig which is sourced based on Zephyr
1035                                                    # toolchain variant and therefore not visible
1036                                                    # to compliance.
1037        "TYPE_BOOLEAN",
1038        "USB_CONSOLE",
1039        "USE_STDC_",
1040        "WHATEVER",
1041        "ZEPHYR_TRY_MASS_ERASE", # MCUBoot setting described in sysbuild
1042                                 # documentation
1043        "ZTEST_FAIL_TEST_",  # regex in tests/ztest/fail/CMakeLists.txt
1044        # zephyr-keep-sorted-stop
1045    }
1046
1047
1048class KconfigBasicCheck(KconfigCheck):
1049    """
1050    Checks if we are introducing any new warnings/errors with Kconfig,
1051    for example using undefined Kconfig variables.
1052    This runs the basic Kconfig test, which is checking only for undefined
1053    references inside the Kconfig tree.
1054    """
1055    name = "KconfigBasic"
1056    doc = "See https://docs.zephyrproject.org/latest/build/kconfig/tips.html for more details."
1057    path_hint = "<zephyr-base>"
1058
1059    def run(self):
1060        super().run(full=False)
1061
1062class KconfigBasicNoModulesCheck(KconfigCheck):
1063    """
1064    Checks if we are introducing any new warnings/errors with Kconfig when no
1065    modules are available. Catches symbols used in the main repository but
1066    defined only in a module.
1067    """
1068    name = "KconfigBasicNoModules"
1069    doc = "See https://docs.zephyrproject.org/latest/build/kconfig/tips.html for more details."
1070    path_hint = "<zephyr-base>"
1071    def run(self):
1072        super().run(full=False, no_modules=True)
1073
1074
1075class KconfigHWMv2Check(KconfigCheck, ComplianceTest):
1076    """
1077    This runs the Kconfig test for board and SoC v2 scheme.
1078    This check ensures that all symbols inside the v2 scheme is also defined
1079    within the same tree.
1080    This ensures the board and SoC trees are fully self-contained and reusable.
1081    """
1082    name = "KconfigHWMv2"
1083    doc = "See https://docs.zephyrproject.org/latest/guides/kconfig/index.html for more details."
1084
1085    def run(self):
1086        # Use dedicated Kconfig board / soc v2 scheme file.
1087        # This file sources only v2 scheme tree.
1088        kconfig_file = os.path.join(os.path.dirname(__file__), "Kconfig.board.v2")
1089        super().run(full=False, hwm="v2", filename=kconfig_file)
1090
1091
1092class Nits(ComplianceTest):
1093    """
1094    Checks various nits in added/modified files. Doesn't check stuff that's
1095    already covered by e.g. checkpatch.pl and pylint.
1096    """
1097    name = "Nits"
1098    doc = "See https://docs.zephyrproject.org/latest/contribute/guidelines.html#coding-style for more details."
1099    path_hint = "<git-top>"
1100
1101    def run(self):
1102        # Loop through added/modified files
1103        for fname in get_files(filter="d"):
1104            if "Kconfig" in fname:
1105                self.check_kconfig_header(fname)
1106                self.check_redundant_zephyr_source(fname)
1107
1108            if fname.startswith("dts/bindings/"):
1109                self.check_redundant_document_separator(fname)
1110
1111            if fname.endswith((".c", ".conf", ".cpp", ".dts", ".overlay",
1112                               ".h", ".ld", ".py", ".rst", ".txt", ".yaml",
1113                               ".yml")) or \
1114               "Kconfig" in fname or \
1115               "defconfig" in fname or \
1116               fname == "README":
1117
1118                self.check_source_file(fname)
1119
1120    def check_kconfig_header(self, fname):
1121        # Checks for a spammy copy-pasted header format
1122
1123        with open(os.path.join(GIT_TOP, fname), encoding="utf-8") as f:
1124            contents = f.read()
1125
1126        # 'Kconfig - yada yada' has a copy-pasted redundant filename at the
1127        # top. This probably means all of the header was copy-pasted.
1128        if re.match(r"\s*#\s*(K|k)config[\w.-]*\s*-", contents):
1129            self.failure(f"""
1130Please use this format for the header in '{fname}' (see
1131https://docs.zephyrproject.org/latest/build/kconfig/tips.html#header-comments-and-other-nits):
1132
1133    # <Overview of symbols defined in the file, preferably in plain English>
1134    (Blank line)
1135    # Copyright (c) 2019 ...
1136    # SPDX-License-Identifier: <License>
1137    (Blank line)
1138    (Kconfig definitions)
1139
1140Skip the "Kconfig - " part of the first line, since it's clear that the comment
1141is about Kconfig from context. The "# Kconfig - " is what triggers this
1142failure.
1143""")
1144
1145    def check_redundant_zephyr_source(self, fname):
1146        # Checks for 'source "$(ZEPHYR_BASE)/Kconfig[.zephyr]"', which can be
1147        # be simplified to 'source "Kconfig[.zephyr]"'
1148
1149        with open(os.path.join(GIT_TOP, fname), encoding="utf-8") as f:
1150            # Look for e.g. rsource as well, for completeness
1151            match = re.search(
1152                r'^\s*(?:o|r|or)?source\s*"\$\(?ZEPHYR_BASE\)?/(Kconfig(?:\.zephyr)?)"',
1153                f.read(), re.MULTILINE)
1154
1155            if match:
1156                self.failure("""
1157Redundant 'source "$(ZEPHYR_BASE)/{0}" in '{1}'. Just do 'source "{0}"'
1158instead. The $srctree environment variable already points to the Zephyr root,
1159and all 'source's are relative to it.""".format(match.group(1), fname))
1160
1161    def check_redundant_document_separator(self, fname):
1162        # Looks for redundant '...' document separators in bindings
1163
1164        with open(os.path.join(GIT_TOP, fname), encoding="utf-8") as f:
1165            if re.search(r"^\.\.\.", f.read(), re.MULTILINE):
1166                self.failure(f"""\
1167Redundant '...' document separator in {fname}. Binding YAML files are never
1168concatenated together, so no document separators are needed.""")
1169
1170    def check_source_file(self, fname):
1171        # Generic nits related to various source files
1172
1173        with open(os.path.join(GIT_TOP, fname), encoding="utf-8") as f:
1174            contents = f.read()
1175
1176        if not contents.endswith("\n"):
1177            self.failure(f"Missing newline at end of '{fname}'. Check your text "
1178                         f"editor settings.")
1179
1180        if contents.startswith("\n"):
1181            self.failure(f"Please remove blank lines at start of '{fname}'")
1182
1183        if contents.endswith("\n\n"):
1184            self.failure(f"Please remove blank lines at end of '{fname}'")
1185
1186
1187class GitDiffCheck(ComplianceTest):
1188    """
1189    Checks for conflict markers or whitespace errors with git diff --check
1190    """
1191    name = "GitDiffCheck"
1192    doc = "Git conflict markers and whitespace errors are not allowed in added changes"
1193    path_hint = "<git-top>"
1194
1195    def run(self):
1196        offending_lines = []
1197        # Use regex to filter out unnecessay output
1198        # Reason: `--check` is mutually exclusive with `--name-only` and `-s`
1199        p = re.compile(r"\S+\: .*\.")
1200
1201        for shaidx in get_shas(COMMIT_RANGE):
1202            # Ignore non-zero return status code
1203            # Reason: `git diff --check` sets the return code to the number of offending lines
1204            diff = git("diff", f"{shaidx}^!", "--check", ignore_non_zero=True)
1205
1206            lines = p.findall(diff)
1207            lines = map(lambda x: f"{shaidx}: {x}", lines)
1208            offending_lines.extend(lines)
1209
1210        if len(offending_lines) > 0:
1211            self.failure("\n".join(offending_lines))
1212
1213
1214class GitLint(ComplianceTest):
1215    """
1216    Runs gitlint on the commits and finds issues with style and syntax
1217
1218    """
1219    name = "Gitlint"
1220    doc = "See https://docs.zephyrproject.org/latest/contribute/guidelines.html#commit-guidelines for more details"
1221    path_hint = "<git-top>"
1222
1223    def run(self):
1224        # By default gitlint looks for .gitlint configuration only in
1225        # the current directory
1226        try:
1227            subprocess.run('gitlint --commits ' + COMMIT_RANGE,
1228                           check=True,
1229                           stdout=subprocess.PIPE,
1230                           stderr=subprocess.STDOUT,
1231                           shell=True, cwd=GIT_TOP)
1232
1233        except subprocess.CalledProcessError as ex:
1234            self.failure(ex.output.decode("utf-8"))
1235
1236
1237class PyLint(ComplianceTest):
1238    """
1239    Runs pylint on all .py files, with a limited set of checks enabled. The
1240    configuration is in the pylintrc file.
1241    """
1242    name = "Pylint"
1243    doc = "See https://www.pylint.org/ for more details"
1244    path_hint = "<git-top>"
1245
1246    def run(self):
1247        # Path to pylint configuration file
1248        pylintrc = os.path.abspath(os.path.join(os.path.dirname(__file__),
1249                                                "pylintrc"))
1250
1251        # Path to additional pylint check scripts
1252        check_script_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
1253                                                        "../pylint/checkers"))
1254
1255        # List of files added/modified by the commit(s).
1256        files = get_files(filter="d")
1257
1258        # Filter out everything but Python files. Keep filenames
1259        # relative (to GIT_TOP) to stay farther from any command line
1260        # limit.
1261        py_files = filter_py(GIT_TOP, files)
1262        if not py_files:
1263            return
1264
1265        python_environment = os.environ.copy()
1266        if "PYTHONPATH" in python_environment:
1267            python_environment["PYTHONPATH"] = check_script_dir + ":" + \
1268                                               python_environment["PYTHONPATH"]
1269        else:
1270            python_environment["PYTHONPATH"] = check_script_dir
1271
1272        pylintcmd = ["pylint", "--output-format=json2", "--rcfile=" + pylintrc,
1273                     "--load-plugins=argparse-checker"] + py_files
1274        logger.info(cmd2str(pylintcmd))
1275        try:
1276            subprocess.run(pylintcmd,
1277                           check=True,
1278                           stdout=subprocess.PIPE,
1279                           stderr=subprocess.STDOUT,
1280                           cwd=GIT_TOP,
1281                           env=python_environment)
1282        except subprocess.CalledProcessError as ex:
1283            output = ex.output.decode("utf-8")
1284            messages = json.loads(output)['messages']
1285            for m in messages:
1286                severity = 'unknown'
1287                if m['messageId'][0] in ('F', 'E'):
1288                    severity = 'error'
1289                elif m['messageId'][0] in ('W','C', 'R', 'I'):
1290                    severity = 'warning'
1291                self.fmtd_failure(severity, m['messageId'], m['path'],
1292                                  m['line'], col=str(m['column']), desc=m['message']
1293                                  + f" ({m['symbol']})")
1294
1295            if len(messages) == 0:
1296                # If there are no specific messages add the whole output as a failure
1297                self.failure(output)
1298
1299
1300def filter_py(root, fnames):
1301    # PyLint check helper. Returns all Python script filenames among the
1302    # filenames in 'fnames', relative to directory 'root'.
1303    #
1304    # Uses the python-magic library, so that we can detect Python
1305    # files that don't end in .py as well. python-magic is a frontend
1306    # to libmagic, which is also used by 'file'.
1307    return [fname for fname in fnames
1308            if (fname.endswith(".py") or
1309             magic.from_file(os.path.join(root, fname),
1310                             mime=True) == "text/x-python")]
1311
1312
1313class Identity(ComplianceTest):
1314    """
1315    Checks if Emails of author and signed-off messages are consistent.
1316    """
1317    name = "Identity"
1318    doc = "See https://docs.zephyrproject.org/latest/contribute/guidelines.html#commit-guidelines for more details"
1319    # git rev-list and git log don't depend on the current (sub)directory
1320    # unless explicited
1321    path_hint = "<git-top>"
1322
1323    def run(self):
1324        for shaidx in get_shas(COMMIT_RANGE):
1325            commit = git("log", "--decorate=short", "-n 1", shaidx)
1326            signed = []
1327            author = ""
1328            sha = ""
1329            parsed_addr = None
1330            for line in commit.split("\n"):
1331                match = re.search(r"^commit\s([^\s]*)", line)
1332                if match:
1333                    sha = match.group(1)
1334                match = re.search(r"^Author:\s(.*)", line)
1335                if match:
1336                    author = match.group(1)
1337                    parsed_addr = parseaddr(author)
1338                match = re.search(r"signed-off-by:\s(.*)", line, re.IGNORECASE)
1339                if match:
1340                    signed.append(match.group(1))
1341
1342            error1 = f"{sha}: author email ({author}) needs to match one of " \
1343                     f"the signed-off-by entries."
1344            error2 = f"{sha}: author email ({author}) does not follow the " \
1345                     f"syntax: First Last <email>."
1346            error3 = f"{sha}: author email ({author}) must be a real email " \
1347                     f"and cannot end in @users.noreply.github.com"
1348            failure = None
1349            if author not in signed:
1350                failure = error1
1351
1352            if not parsed_addr or len(parsed_addr[0].split(" ")) < 2:
1353                if not failure:
1354
1355                    failure = error2
1356                else:
1357                    failure = failure + "\n" + error2
1358            elif parsed_addr[1].endswith("@users.noreply.github.com"):
1359                failure = error3
1360
1361            if failure:
1362                self.failure(failure)
1363
1364
1365class BinaryFiles(ComplianceTest):
1366    """
1367    Check that the diff contains no binary files.
1368    """
1369    name = "BinaryFiles"
1370    doc = "No binary files allowed."
1371    path_hint = "<git-top>"
1372
1373    def run(self):
1374        BINARY_ALLOW_PATHS = ("doc/", "boards/", "samples/")
1375        # svg files are always detected as binary, see .gitattributes
1376        BINARY_ALLOW_EXT = (".jpg", ".jpeg", ".png", ".svg", ".webp")
1377
1378        for stat in git("diff", "--numstat", "--diff-filter=A",
1379                        COMMIT_RANGE).splitlines():
1380            added, deleted, fname = stat.split("\t")
1381            if added == "-" and deleted == "-":
1382                if (fname.startswith(BINARY_ALLOW_PATHS) and
1383                    fname.endswith(BINARY_ALLOW_EXT)):
1384                    continue
1385                self.failure(f"Binary file not allowed: {fname}")
1386
1387
1388class ImageSize(ComplianceTest):
1389    """
1390    Check that any added image is limited in size.
1391    """
1392    name = "ImageSize"
1393    doc = "Check the size of image files."
1394    path_hint = "<git-top>"
1395
1396    def run(self):
1397        SIZE_LIMIT = 250 << 10
1398        BOARD_SIZE_LIMIT = 100 << 10
1399
1400        for file in get_files(filter="d"):
1401            full_path = os.path.join(GIT_TOP, file)
1402            mime_type = magic.from_file(full_path, mime=True)
1403
1404            if not mime_type.startswith("image/"):
1405                continue
1406
1407            size = os.path.getsize(full_path)
1408
1409            limit = SIZE_LIMIT
1410            if file.startswith("boards/"):
1411                limit = BOARD_SIZE_LIMIT
1412
1413            if size > limit:
1414                self.failure(f"Image file too large: {file} reduce size to "
1415                             f"less than {limit >> 10}kB")
1416
1417
1418class MaintainersFormat(ComplianceTest):
1419    """
1420    Check that MAINTAINERS file parses correctly.
1421    """
1422    name = "MaintainersFormat"
1423    doc = "Check that MAINTAINERS file parses correctly."
1424    path_hint = "<git-top>"
1425
1426    def run(self):
1427        MAINTAINERS_FILES = ["MAINTAINERS.yml", "MAINTAINERS.yaml"]
1428
1429        for file in MAINTAINERS_FILES:
1430            if not os.path.exists(file):
1431                continue
1432
1433            try:
1434                Maintainers(file)
1435            except MaintainersError as ex:
1436                self.failure(f"Error parsing {file}: {ex}")
1437
1438class ModulesMaintainers(ComplianceTest):
1439    """
1440    Check that all modules have a MAINTAINERS entry.
1441    """
1442    name = "ModulesMaintainers"
1443    doc = "Check that all modules have a MAINTAINERS entry."
1444    path_hint = "<git-top>"
1445
1446    def run(self):
1447        MAINTAINERS_FILES = ["MAINTAINERS.yml", "MAINTAINERS.yaml"]
1448
1449        manifest = Manifest.from_file()
1450
1451        maintainers_file = None
1452        for file in MAINTAINERS_FILES:
1453            if os.path.exists(file):
1454                maintainers_file = file
1455                break
1456        if not maintainers_file:
1457            return
1458
1459        maintainers = Maintainers(maintainers_file)
1460
1461        for project in manifest.get_projects([]):
1462            if not manifest.is_active(project):
1463                continue
1464
1465            if isinstance(project, ManifestProject):
1466                continue
1467
1468            area = f"West project: {project.name}"
1469            if area not in maintainers.areas:
1470                self.failure(f"Missing {maintainers_file} entry for: \"{area}\"")
1471
1472
1473class YAMLLint(ComplianceTest):
1474    """
1475    YAMLLint
1476    """
1477    name = "YAMLLint"
1478    doc = "Check YAML files with YAMLLint."
1479    path_hint = "<git-top>"
1480
1481    def run(self):
1482        config_file = os.path.join(ZEPHYR_BASE, ".yamllint")
1483
1484        for file in get_files(filter="d"):
1485            if Path(file).suffix not in ['.yaml', '.yml']:
1486                continue
1487
1488            yaml_config = config.YamlLintConfig(file=config_file)
1489
1490            if file.startswith(".github/"):
1491                # Tweak few rules for workflow files.
1492                yaml_config.rules["line-length"] = False
1493                yaml_config.rules["truthy"]["allowed-values"].extend(['on', 'off'])
1494            elif file == ".codecov.yml":
1495                yaml_config.rules["truthy"]["allowed-values"].extend(['yes', 'no'])
1496
1497            with open(file, 'r') as fp:
1498                for p in linter.run(fp, yaml_config):
1499                    self.fmtd_failure('warning', f'YAMLLint ({p.rule})', file,
1500                                      p.line, col=p.column, desc=p.desc)
1501
1502
1503class SphinxLint(ComplianceTest):
1504    """
1505    SphinxLint
1506    """
1507
1508    name = "SphinxLint"
1509    doc = "Check Sphinx/reStructuredText files with sphinx-lint."
1510    path_hint = "<git-top>"
1511
1512    # Checkers added/removed to sphinx-lint's default set
1513    DISABLE_CHECKERS = ["horizontal-tab", "missing-space-before-default-role"]
1514    ENABLE_CHECKERS = ["default-role"]
1515
1516    def run(self):
1517        for file in get_files():
1518            if not file.endswith(".rst"):
1519                continue
1520
1521            try:
1522                # sphinx-lint does not expose a public API so interaction is done via CLI
1523                subprocess.run(
1524                    f"sphinx-lint -d {','.join(self.DISABLE_CHECKERS)} -e {','.join(self.ENABLE_CHECKERS)} {file}",
1525                    check=True,
1526                    stdout=subprocess.PIPE,
1527                    stderr=subprocess.STDOUT,
1528                    shell=True,
1529                    cwd=GIT_TOP,
1530                )
1531
1532            except subprocess.CalledProcessError as ex:
1533                for line in ex.output.decode("utf-8").splitlines():
1534                    match = re.match(r"^(.*):(\d+): (.*)$", line)
1535
1536                    if match:
1537                        self.fmtd_failure(
1538                            "error",
1539                            "SphinxLint",
1540                            match.group(1),
1541                            int(match.group(2)),
1542                            desc=match.group(3),
1543                        )
1544
1545
1546class KeepSorted(ComplianceTest):
1547    """
1548    Check for blocks of code or config that should be kept sorted.
1549    """
1550    name = "KeepSorted"
1551    doc = "Check for blocks of code or config that should be kept sorted."
1552    path_hint = "<git-top>"
1553
1554    MARKER = "zephyr-keep-sorted"
1555
1556    def block_check_sorted(self, block_data, regex):
1557        def _test_indent(txt: str):
1558            return txt.startswith((" ", "\t"))
1559
1560        if regex is None:
1561            block_data = textwrap.dedent(block_data)
1562
1563        lines = block_data.splitlines()
1564        last = ''
1565
1566        for idx, line in enumerate(lines):
1567            if not line.strip():
1568                # Ignore blank lines
1569                continue
1570
1571            if regex:
1572                # check for regex
1573                if not re.match(regex, line):
1574                    continue
1575            else:
1576                if _test_indent(line):
1577                    continue
1578
1579                # Fold back indented lines after the current one
1580                for cont in takewhile(_test_indent, lines[idx + 1:]):
1581                    line += cont.strip()
1582
1583            if line < last:
1584                return idx
1585
1586            last = line
1587
1588        return -1
1589
1590    def check_file(self, file, fp):
1591        mime_type = magic.from_file(file, mime=True)
1592
1593        if not mime_type.startswith("text/"):
1594            return
1595
1596        block_data = ""
1597        in_block = False
1598
1599        start_marker = f"{self.MARKER}-start"
1600        stop_marker = f"{self.MARKER}-stop"
1601        regex_marker = r"re\((.+)\)"
1602        start_line = 0
1603        regex = None
1604
1605        for line_num, line in enumerate(fp.readlines(), start=1):
1606            if start_marker in line:
1607                if in_block:
1608                    desc = f"nested {start_marker}"
1609                    self.fmtd_failure("error", "KeepSorted", file, line_num,
1610                                     desc=desc)
1611                in_block = True
1612                block_data = ""
1613                start_line = line_num + 1
1614
1615                # Test for a regex block
1616                match = re.search(regex_marker, line)
1617                regex = match.group(1) if match else None
1618            elif stop_marker in line:
1619                if not in_block:
1620                    desc = f"{stop_marker} without {start_marker}"
1621                    self.fmtd_failure("error", "KeepSorted", file, line_num,
1622                                     desc=desc)
1623                in_block = False
1624
1625                idx = self.block_check_sorted(block_data, regex)
1626                if idx >= 0:
1627                    desc = f"sorted block has out-of-order line at {start_line + idx}"
1628                    self.fmtd_failure("error", "KeepSorted", file, line_num,
1629                                      desc=desc)
1630            elif in_block:
1631                block_data += line
1632
1633        if in_block:
1634            self.failure(f"unterminated {start_marker} in {file}")
1635
1636    def run(self):
1637        for file in get_files(filter="d"):
1638            with open(file, "r") as fp:
1639                self.check_file(file, fp)
1640
1641
1642class Ruff(ComplianceTest):
1643    """
1644    Ruff
1645    """
1646    name = "Ruff"
1647    doc = "Check python files with ruff."
1648    path_hint = "<git-top>"
1649
1650    def run(self):
1651        for file in get_files(filter="d"):
1652            if not file.endswith(".py"):
1653                continue
1654
1655            try:
1656                subprocess.run(
1657                    f"ruff check --force-exclude --output-format=json {file}",
1658                    check=True,
1659                    stdout=subprocess.PIPE,
1660                    stderr=subprocess.DEVNULL,
1661                    shell=True,
1662                    cwd=GIT_TOP,
1663                )
1664            except subprocess.CalledProcessError as ex:
1665                output = ex.output.decode("utf-8")
1666                messages = json.loads(output)
1667                for m in messages:
1668                    self.fmtd_failure(
1669                        "error",
1670                        f'Python lint error ({m.get("code")}) see {m.get("url")}',
1671                        file,
1672                        line=m.get("location", {}).get("row"),
1673                        col=m.get("location", {}).get("column"),
1674                        end_line=m.get("end_location", {}).get("row"),
1675                        end_col=m.get("end_location", {}).get("column"),
1676                        desc=m.get("message"),
1677                    )
1678            try:
1679                subprocess.run(
1680                    f"ruff format --force-exclude --diff {file}",
1681                    check=True,
1682                    shell=True,
1683                    cwd=GIT_TOP,
1684                )
1685            except subprocess.CalledProcessError:
1686                desc = f"Run 'ruff format {file}'"
1687                self.fmtd_failure("error", "Python format error", file, desc=desc)
1688
1689
1690class TextEncoding(ComplianceTest):
1691    """
1692    Check that any text file is encoded in ascii or utf-8.
1693    """
1694    name = "TextEncoding"
1695    doc = "Check the encoding of text files."
1696    path_hint = "<git-top>"
1697
1698    ALLOWED_CHARSETS = ["us-ascii", "utf-8"]
1699
1700    def run(self):
1701        m = magic.Magic(mime=True, mime_encoding=True)
1702
1703        for file in get_files(filter="d"):
1704            full_path = os.path.join(GIT_TOP, file)
1705            mime_type = m.from_file(full_path)
1706
1707            if not mime_type.startswith("text/"):
1708                continue
1709
1710            # format is "text/<type>; charset=<charset>"
1711            if mime_type.rsplit('=')[-1] not in self.ALLOWED_CHARSETS:
1712                desc = f"Text file with unsupported encoding: {file} has mime type {mime_type}"
1713                self.fmtd_failure("error", "TextEncoding", file, desc=desc)
1714
1715
1716def init_logs(cli_arg):
1717    # Initializes logging
1718
1719    global logger
1720
1721    level = os.environ.get('LOG_LEVEL', "WARN")
1722
1723    console = logging.StreamHandler()
1724    console.setFormatter(logging.Formatter('%(levelname)-8s: %(message)s'))
1725
1726    logger = logging.getLogger('')
1727    logger.addHandler(console)
1728    logger.setLevel(cli_arg or level)
1729
1730    logger.info("Log init completed, level=%s",
1731                 logging.getLevelName(logger.getEffectiveLevel()))
1732
1733
1734def inheritors(klass):
1735    subclasses = set()
1736    work = [klass]
1737    while work:
1738        parent = work.pop()
1739        for child in parent.__subclasses__():
1740            if child not in subclasses:
1741                subclasses.add(child)
1742                work.append(child)
1743    return subclasses
1744
1745
1746def annotate(res):
1747    """
1748    https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#about-workflow-commands
1749    """
1750    msg = res.message.replace('%', '%25').replace('\n', '%0A').replace('\r', '%0D')
1751    notice = f'::{res.severity} file={res.file}' + \
1752             (f',line={res.line}' if res.line else '') + \
1753             (f',col={res.col}' if res.col else '') + \
1754             (f',endLine={res.end_line}' if res.end_line else '') + \
1755             (f',endColumn={res.end_col}' if res.end_col else '') + \
1756             f',title={res.title}::{msg}'
1757    print(notice)
1758
1759
1760def resolve_path_hint(hint):
1761    if hint == "<zephyr-base>":
1762        return ZEPHYR_BASE
1763    elif hint == "<git-top>":
1764        return GIT_TOP
1765    else:
1766        return hint
1767
1768
1769def parse_args(argv):
1770
1771    default_range = 'HEAD~1..HEAD'
1772    parser = argparse.ArgumentParser(
1773        description="Check for coding style and documentation warnings.", allow_abbrev=False)
1774    parser.add_argument('-c', '--commits', default=default_range,
1775                        help=f'''Commit range in the form: a..[b], default is
1776                        {default_range}''')
1777    parser.add_argument('-o', '--output', default="compliance.xml",
1778                        help='''Name of outfile in JUnit format,
1779                        default is ./compliance.xml''')
1780    parser.add_argument('-n', '--no-case-output', action="store_true",
1781                        help="Do not store the individual test case output.")
1782    parser.add_argument('-l', '--list', action="store_true",
1783                        help="List all checks and exit")
1784    parser.add_argument("-v", "--loglevel", choices=['DEBUG', 'INFO', 'WARNING',
1785                                                     'ERROR', 'CRITICAL'],
1786                        help="python logging level")
1787    parser.add_argument('-m', '--module', action="append", default=[],
1788                        help="Checks to run. All checks by default. (case " \
1789                        "insensitive)")
1790    parser.add_argument('-e', '--exclude-module', action="append", default=[],
1791                        help="Do not run the specified checks (case " \
1792                        "insensitive)")
1793    parser.add_argument('-j', '--previous-run', default=None,
1794                        help='''Pre-load JUnit results in XML format
1795                        from a previous run and combine with new results.''')
1796    parser.add_argument('--annotate', action="store_true",
1797                        help="Print GitHub Actions-compatible annotations.")
1798
1799    return parser.parse_args(argv)
1800
1801def _main(args):
1802    # The "real" main(), which is wrapped to catch exceptions and report them
1803    # to GitHub. Returns the number of test failures.
1804
1805    global ZEPHYR_BASE
1806    ZEPHYR_BASE = os.environ.get('ZEPHYR_BASE')
1807    if not ZEPHYR_BASE:
1808        # Let the user run this script as ./scripts/ci/check_compliance.py without
1809        #  making them set ZEPHYR_BASE.
1810        ZEPHYR_BASE = str(Path(__file__).resolve().parents[2])
1811
1812        # Propagate this decision to child processes.
1813        os.environ['ZEPHYR_BASE'] = ZEPHYR_BASE
1814
1815    # The absolute path of the top-level git directory. Initialize it here so
1816    # that issues running Git can be reported to GitHub.
1817    global GIT_TOP
1818    GIT_TOP = git("rev-parse", "--show-toplevel")
1819
1820    # The commit range passed in --commit, e.g. "HEAD~3"
1821    global COMMIT_RANGE
1822    COMMIT_RANGE = args.commits
1823
1824    init_logs(args.loglevel)
1825
1826    logger.info(f'Running tests on commit range {COMMIT_RANGE}')
1827
1828    if args.list:
1829        for testcase in sorted(inheritors(ComplianceTest), key=lambda x: x.name):
1830            print(testcase.name)
1831        return 0
1832
1833    # Load saved test results from an earlier run, if requested
1834    if args.previous_run:
1835        if not os.path.exists(args.previous_run):
1836            # This probably means that an earlier pass had an internal error
1837            # (the script is currently run multiple times by the ci-pipelines
1838            # repo). Since that earlier pass might've posted an error to
1839            # GitHub, avoid generating a GitHub comment here, by avoiding
1840            # sys.exit() (which gets caught in main()).
1841            print(f"error: '{args.previous_run}' not found",
1842                  file=sys.stderr)
1843            return 1
1844
1845        logging.info(f"Loading previous results from {args.previous_run}")
1846        for loaded_suite in JUnitXml.fromfile(args.previous_run):
1847            suite = loaded_suite
1848            break
1849    else:
1850        suite = TestSuite("Compliance")
1851
1852    included = list(map(lambda x: x.lower(), args.module))
1853    excluded = list(map(lambda x: x.lower(), args.exclude_module))
1854
1855    for testcase in inheritors(ComplianceTest):
1856        # "Modules" and "testcases" are the same thing. Better flags would have
1857        # been --tests and --exclude-tests or the like, but it's awkward to
1858        # change now.
1859
1860        if included and testcase.name.lower() not in included:
1861            continue
1862
1863        if testcase.name.lower() in excluded:
1864            print("Skipping " + testcase.name)
1865            continue
1866
1867        test = testcase()
1868        try:
1869            print(f"Running {test.name:16} tests in "
1870                  f"{resolve_path_hint(test.path_hint)} ...")
1871            test.run()
1872        except EndTest:
1873            pass
1874
1875        # Annotate if required
1876        if args.annotate:
1877            for res in test.fmtd_failures:
1878                annotate(res)
1879
1880        suite.add_testcase(test.case)
1881
1882    if args.output:
1883        xml = JUnitXml()
1884        xml.add_testsuite(suite)
1885        xml.update_statistics()
1886        xml.write(args.output, pretty=True)
1887
1888    failed_cases = []
1889    name2doc = {testcase.name: testcase.doc
1890                for testcase in inheritors(ComplianceTest)}
1891
1892    for case in suite:
1893        if case.result:
1894            if case.is_skipped:
1895                logging.warning(f"Skipped {case.name}")
1896            else:
1897                failed_cases.append(case)
1898        else:
1899            # Some checks can produce no .result
1900            logging.info(f"No JUnit result for {case.name}")
1901
1902    n_fails = len(failed_cases)
1903
1904    if n_fails:
1905        print(f"{n_fails} checks failed")
1906        for case in failed_cases:
1907            for res in case.result:
1908                errmsg = res.text.strip()
1909                logging.error(f"Test {case.name} failed: \n{errmsg}")
1910            if args.no_case_output:
1911                continue
1912            with open(f"{case.name}.txt", "w") as f:
1913                docs = name2doc.get(case.name)
1914                f.write(f"{docs}\n")
1915                for res in case.result:
1916                    errmsg = res.text.strip()
1917                    f.write(f'\n {errmsg}')
1918
1919    if args.output:
1920        print(f"\nComplete results in {args.output}")
1921    return n_fails
1922
1923
1924def main(argv=None):
1925    args = parse_args(argv)
1926
1927    try:
1928        # pylint: disable=unused-import
1929        from lxml import etree
1930    except ImportError:
1931        print("\nERROR: Python module lxml not installed, unable to proceed")
1932        print("See https://github.com/weiwei/junitparser/issues/99")
1933        return 1
1934
1935    try:
1936        n_fails = _main(args)
1937    except BaseException:
1938        # Catch BaseException instead of Exception to include stuff like
1939        # SystemExit (raised by sys.exit())
1940        print(f"Python exception in `{__file__}`:\n\n"
1941              f"```\n{traceback.format_exc()}\n```")
1942
1943        raise
1944
1945    sys.exit(n_fails)
1946
1947
1948def cmd2str(cmd):
1949    # Formats the command-line arguments in the iterable 'cmd' into a string,
1950    # for error messages and the like
1951
1952    return " ".join(shlex.quote(word) for word in cmd)
1953
1954
1955def err(msg):
1956    cmd = sys.argv[0]  # Empty if missing
1957    if cmd:
1958        cmd += ": "
1959    sys.exit(f"{cmd} error: {msg}")
1960
1961
1962if __name__ == "__main__":
1963    main(sys.argv[1:])
1964