1#!/usr/bin/env python3
2
3# Copyright (c) 2018,2020 Intel Corporation
4# Copyright (c) 2022 Nordic Semiconductor ASA
5# SPDX-License-Identifier: Apache-2.0
6
7import argparse
8import collections
9import json
10import logging
11import os
12import platform
13import re
14import shlex
15import shutil
16import subprocess
17import sys
18import tempfile
19import textwrap
20import traceback
21from collections.abc import Iterable
22from itertools import takewhile
23from pathlib import Path, PurePath
24
25import magic
26import unidiff
27import yaml
28from dotenv import load_dotenv
29from junitparser import Error, Failure, JUnitXml, Skipped, TestCase, TestSuite
30from reuse.project import Project
31from reuse.report import ProjectSubsetReport
32from west.manifest import Manifest, ManifestProject
33from yamllint import config, linter
34
35try:
36    from yaml import CSafeLoader as SafeLoader
37except ImportError:
38    from yaml import SafeLoader
39
40sys.path.insert(0, str(Path(__file__).resolve().parents[1]))
41import list_boards
42import list_hardware
43from get_maintainer import Maintainers, MaintainersError
44
45sys.path.insert(
46    0, str(Path(__file__).resolve().parents[2] / "scripts" / "dts" / "python-devicetree" / "src")
47)
48from devicetree import edtlib
49
# Let the user run this script as ./scripts/ci/check_compliance.py without
# making them set ZEPHYR_BASE.
ZEPHYR_BASE = os.environ.get('ZEPHYR_BASE')
if ZEPHYR_BASE:
    ZEPHYR_BASE = Path(ZEPHYR_BASE)
else:
    # Fall back to the repository root: two directories above this script.
    ZEPHYR_BASE = Path(__file__).resolve().parents[2]
    # Propagate this decision to child processes.
    os.environ['ZEPHYR_BASE'] = str(ZEPHYR_BASE)

# Initialize the property names allowlist
# (binding property names that may legitimately contain underscores; consumed
# by DevicetreeBindingsCheck). An empty YAML document loads as None, which
# maps to an empty set.
BINDINGS_PROPERTIES_AL = None
with open(Path(__file__).parents[1] / 'bindings_properties_allowlist.yaml') as f:
    allowlist = yaml.safe_load(f.read())
    if allowlist is not None:
        BINDINGS_PROPERTIES_AL = set(allowlist)
    else:
        BINDINGS_PROPERTIES_AL = set()

# Module-wide logger; presumably configured elsewhere (e.g. in main()) — left
# as None here. TODO(review): confirm initialization site.
logger = None
70
71
def git(*args, cwd=None, ignore_non_zero=False):
    """Run a Git command and return its rstrip()ed stdout.

    Called like git("diff"). Unless 'ignore_non_zero' is True, a non-zero
    exit status or any output on stderr is fatal and reported via err()
    (which exits via sys.exit()). 'cwd' is the working directory to use
    (default: current directory).
    """
    full_cmd = ("git", *args)
    try:
        proc = subprocess.run(full_cmd, capture_output=True, cwd=cwd)
    except OSError as os_err:
        err(f"failed to run '{cmd2str(full_cmd)}': {os_err}")

    stdout_text = proc.stdout.decode("utf-8")
    if not ignore_non_zero and (proc.returncode or proc.stderr):
        err(
            f"'{cmd2str(full_cmd)}' exited with status {proc.returncode} and/or "
            f"wrote to stderr.\n"
            f"==stdout==\n"
            f"{stdout_text}\n"
            f"==stderr==\n"
            f"{proc.stderr.decode('utf-8')}\n"
        )

    return stdout_text.rstrip()
95
96
def get_shas(refspec):
    """Return the list of Git SHAs for 'refspec'.

    A refspec containing a '.' (a range such as a..b) expands to every
    commit in the range; anything else yields at most one SHA.
    """
    limit = -1 if "." in refspec else 1
    return git('rev-list', f'--max-count={limit}', refspec).split()
105
106
def get_files(filter=None, paths=None):
    """Return the files changed in COMMIT_RANGE, per 'git diff --name-only'.

    filter: optional value for git's --diff-filter (e.g. 'd' to exclude
        deleted files).
    paths: optional iterable of pathspecs restricting the diff.

    Entries that do not exist under GIT_TOP (e.g. submodule directories)
    are dropped from the result.
    """
    filter_arg = (f'--diff-filter={filter}',) if filter else ()
    paths_arg = ('--', *paths) if paths else ()
    out = git('diff', '--name-only', *filter_arg, COMMIT_RANGE, *paths_arg)
    # Single-pass filter instead of mutating the list while scanning a copy.
    return [file for file in out.splitlines() if (GIT_TOP / file).exists()]
117
118
def get_module_setting_root(root, settings_file):
    """
    Parse the Zephyr module generated settings file given by 'settings_file'
    and return all root settings defined by 'root', as a list of Paths.

    For example, root='dts' collects every '"DTS_ROOT":"<path>"' entry.
    Returns an empty list when 'settings_file' does not exist.
    """
    root_paths = []

    if os.path.exists(settings_file):
        with open(settings_file) as fp_setting_file:
            content = fp_setting_file.read()

        # Settings lines look like: "<ROOT>_ROOT":"<path>". Build the match
        # prefix once instead of re-uppercasing 'root' on every iteration.
        prefix = f'"{root.upper()}_ROOT":'
        for line in content.strip().split('\n'):
            if line.startswith(prefix):
                _, root_path = line.split(":", 1)
                root_paths.append(Path(root_path.strip('"')))
    return root_paths
139
140
def get_vendor_prefixes(path, errfn=print) -> set[str]:
    """Collect vendor prefixes from a vendor-prefixes.txt file.

    Blank lines and '#' comments are skipped. Lines that do not unpack as
    '<prefix>\\t<name>' are reported through 'errfn' and otherwise ignored.
    """
    prefixes: set[str] = set()
    with open(path) as fp:
        for raw in fp:
            entry = raw.strip()
            if not entry or entry.startswith("#"):
                continue
            try:
                prefix, _ = entry.split("\t", 2)
            except ValueError:
                errfn(f"Invalid line in {path}:\"{entry}\".")
                errfn("Did you forget the tab character?")
            else:
                prefixes.add(prefix)
    return prefixes
155
156
def zephyr_doc_detail_builder(doc_subpath: str) -> str:
    """Build a 'See <url> for more details.' pointer into the Zephyr docs."""
    base_url = "https://docs.zephyrproject.org/latest"
    return f"See {base_url}{doc_subpath} for more details."
159
160
class FmtdFailure(Failure):
    """A junitparser Failure that also records structured location data."""

    def __init__(
        self, severity, title, file, line=None, col=None, desc="", end_line=None, end_col=None
    ):
        self.severity = severity
        self.title = title
        self.file = file
        self.line = line
        self.col = col
        self.end_line = end_line
        self.end_col = end_col
        self.desc = desc

        # Line-oriented human-readable text; location fields are appended
        # only when they are set.
        parts = [f'\n{title}' + (f':{desc}' if desc else '') + f'\nFile:{file}']
        for label, value in (
            ('Line', line),
            ('Column', col),
            ('EndLine', end_line),
            ('EndColumn', end_col),
        ):
            if value:
                parts.append(f'\n{label}:{value}')

        location = f'{file}' + (f':{line}' if line else '')
        super().__init__(f'{location} {desc or title}', severity.lower())

        self.text = ''.join(parts)
189
190
class ComplianceTest:
    """
    Base class for all compliance tests.

    Subclasses should implement a run() method and define these class
    variables:

    name:
      Test name

    doc:
      Link to documentation related to what's being tested

    path_hint:
      The path the test runs itself in. Defaults to the magic string
      "<git-top>", which refers to the top-level repository directory.

      This avoids running 'git' to find the top-level directory before
      main() runs (class variable assignments run when the 'class ...'
      statement runs). That avoids swallowing errors, because main()
      reports them to GitHub.

      Subclasses may override the default with a specific path or one of
      the magic strings below:
      - "<zephyr-base>" can be used to refer to the environment variable
        ZEPHYR_BASE or, when missing, the calculated base of the zephyr
        tree.
    """

    path_hint = "<git-top>"

    def __init__(self):
        # One JUnit test case per compliance test, under the "Guidelines"
        # classname.
        self.case = TestCase(type(self).name, "Guidelines")
        # Formatted failures are tracked separately: Failure can be
        # subclassed, but the subclass is lost when the result is restored
        # from the element tree.
        self.fmtd_failures = []

    def _result(self, res, text):
        # Attach the detail text and append the result to the test case.
        res.text = text.rstrip()
        self.case.result += [res]

    def error(self, text, msg=None, type_="error"):
        """
        Signals a problem with running the test, with message 'msg'.

        Raises an exception internally, so you do not need to put a
        'return' after error().
        """
        self._result(Error(msg if msg else f'{type(self).name} error', type_), text)
        raise EndTest

    def skip(self, text, msg=None, type_="skip"):
        """
        Signals that the test should be skipped, with message 'msg'.

        Raises an exception internally, so you do not need to put a
        'return' after skip().
        """
        self._result(Skipped(msg if msg else f'{type(self).name} skipped', type_), text)
        raise EndTest

    def failure(self, text, msg=None, type_="failure"):
        """
        Signals that the test failed, with message 'msg'. Can be called
        many times within the same test to report multiple failures.
        """
        self._result(Failure(msg if msg else f'{type(self).name} issues', type_), text)

    def fmtd_failure(
        self, severity, title, file, line=None, col=None, desc="", end_line=None, end_col=None
    ):
        """
        Signals that the test failed, and store the information in a
        formatted standardized manner. Can be called many times within the
        same test to report multiple failures.
        """
        fail = FmtdFailure(severity, title, file, line, col, desc, end_line, end_col)
        self._result(fail, fail.text)
        self.fmtd_failures.append(fail)
273
274
class EndTest(Exception):
    """
    Raised by ComplianceTest.error()/skip() to terminate a test.

    Tests may also raise EndTest directly to stop immediately, e.g. from
    deep inside a nested helper call.
    """
282
283
class CheckPatch(ComplianceTest):
    """
    Runs checkpatch and reports found issues

    """

    name = "Checkpatch"
    doc = zephyr_doc_detail_builder("/contribute/guidelines.html#coding-style")

    def run(self):
        checkpatch = ZEPHYR_BASE / 'scripts' / 'checkpatch.pl'
        if not checkpatch.exists():
            self.skip(f'{checkpatch} not found')

        if os.name == 'nt':
            # Windows needs an explicit Perl interpreter to run the script.
            if not shutil.which('perl'):
                self.failure(
                    "Perl not installed - required for checkpatch.pl. "
                    "Please install Perl or add to PATH."
                )
                return
            cmd = ['perl', checkpatch, '--mailback', '--no-tree', '-']
        else:
            # Linux and MacOS execute the script directly.
            cmd = [checkpatch, '--mailback', '--no-tree', '-']

        # Feed the commit range diff into checkpatch over a pipe.
        with subprocess.Popen(
            ('git', 'diff', '--no-ext-diff', COMMIT_RANGE), stdout=subprocess.PIPE, cwd=GIT_TOP
        ) as diff:
            try:
                subprocess.run(
                    cmd,
                    check=True,
                    stdin=diff.stdout,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                    shell=False,
                    cwd=GIT_TOP,
                )
                # checkpatch exited cleanly: nothing to report.
                return
            except subprocess.CalledProcessError as ex:
                output = ex.output.decode("utf-8")

        regex = (
            r'^\s*\S+:(\d+):\s*(ERROR|WARNING):(.+?):(.+)(?:\n|\r\n?)+'
            r'^\s*#(\d+):\s*FILE:\s*(.+):(\d+):'
        )
        matches = re.findall(regex, output, re.MULTILINE)

        # Guard against excessive numbers of findings: report the raw output
        # as a single failure instead of processing each match. The same
        # fallback applies when the regex matched nothing at all.
        if len(matches) > 500 or not matches:
            self.failure(output)
            return

        for m in matches:
            self.fmtd_failure(m[1].lower(), m[2], m[5], m[6], col=None, desc=m[3])
348
349
class BoardYmlCheck(ComplianceTest):
    """
    Check the board.yml files
    """

    name = "BoardYml"
    doc = "Check the board.yml file format"

    def check_board_file(self, file, vendor_prefixes):
        """Validate a single board file."""
        with open(file) as fp:
            for line_num, line in enumerate(fp, start=1):
                if "vendor:" not in line:
                    continue
                _, vnd = line.strip().split(":", 2)
                vnd = vnd.strip()
                if vnd not in vendor_prefixes:
                    self.fmtd_failure(
                        "error", "BoardYml", file, line_num, desc=f"invalid vendor: {vnd}"
                    )

    def run(self):
        path = resolve_path_hint(self.path_hint)

        # Start with the generic fallback vendor, then add the prefixes
        # declared by the main zephyr repo.
        vendor_prefixes = {"others"}
        vendor_prefix_file = ZEPHYR_BASE / "dts" / "bindings" / "vendor-prefixes.txt"
        vendor_prefixes |= get_vendor_prefixes(vendor_prefix_file, self.error)

        # Look for a dts_root declared in this repo's module manifest.
        dts_root = None
        for module_yml in (path / "zephyr" / "module.yml", path / "zephyr" / "module.yaml"):
            if not module_yml.is_file():
                continue
            with module_yml.open('r', encoding='utf-8') as f:
                meta = yaml.load(f.read(), Loader=SafeLoader)
                build_settings = meta.get('build', dict()).get('settings', None)
                if build_settings:
                    dts_root = build_settings.get('dts_root', None)

        # Add vendor prefixes from the current repo, when it declares any.
        if dts_root:
            vendor_prefix_file = Path(dts_root) / "dts" / "bindings" / "vendor-prefixes.txt"
            if vendor_prefix_file.exists():
                vendor_prefixes |= get_vendor_prefixes(vendor_prefix_file, self.error)

        for file in path.glob("**/board.yml"):
            self.check_board_file(file, vendor_prefixes)
395
396
class ClangFormatCheck(ComplianceTest):
    """
    Check if clang-format reports any issues
    """

    name = "ClangFormat"
    doc = zephyr_doc_detail_builder("/contribute/guidelines.html#clang-format")

    def _process_patch_error(self, file: str, patch: unidiff.PatchedFile):
        """Report each hunk of a clang-format-diff patch as a notice."""
        for hunk in patch:
            # Strip the before and after context
            before = next(i for i, v in enumerate(hunk) if str(v).startswith(('-', '+')))
            after = next(i for i, v in enumerate(reversed(hunk)) if str(v).startswith(('-', '+')))
            msg = "".join([str(line) for line in hunk[before : -after or None]])

            # show the hunk at the last line
            self.fmtd_failure(
                "notice",
                "You may want to run clang-format on this change",
                file,
                line=hunk.source_start + hunk.source_length - after,
                desc=f'\r\n{msg}',
            )

    def run(self):
        # clang-format-diff ships as a .py script except on Windows.
        exe = f"clang-format-diff.{'exe' if platform.system() == 'Windows' else 'py'}"

        for file in get_files():
            if Path(file).suffix not in ['.c', '.h']:
                continue

            # Use a context manager so the git process is always waited on
            # and its pipe closed, consistent with how CheckPatch runs its
            # diff pipeline.
            with subprocess.Popen(
                ('git', 'diff', '-U0', '--no-color', COMMIT_RANGE, '--', file),
                stdout=subprocess.PIPE,
                cwd=GIT_TOP,
            ) as diff:
                try:
                    subprocess.run(
                        (exe, '-p1'),
                        check=True,
                        stdin=diff.stdout,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT,
                        cwd=GIT_TOP,
                    )

                except subprocess.CalledProcessError as ex:
                    # Non-zero exit means clang-format-diff produced a patch;
                    # turn each patched file's hunks into notices.
                    patchset = unidiff.PatchSet.from_string(ex.output, encoding="utf-8")
                    for patch in patchset:
                        self._process_patch_error(file, patch)
447
448
class DevicetreeBindingsCheck(ComplianceTest):
    """
    Checks if we are introducing any unwanted properties in Devicetree Bindings.
    """

    name = "DevicetreeBindings"
    doc = zephyr_doc_detail_builder("/build/dts/bindings.html")

    def run(self, full=True):
        bindings_diff, bindings = self.get_yaml_bindings()

        # 'git diff --quiet' exits non-zero when there ARE changes; a clean
        # exit therefore means no binding was touched and the check can be
        # skipped.
        try:
            subprocess.check_call(['git', 'diff', '--quiet', COMMIT_RANGE] + bindings_diff)
        except subprocess.CalledProcessError:
            pass
        else:
            self.skip('no changes to bindings were made')

        for binding in bindings:
            self.check(binding, self.check_yaml_property_name)
            self.check(binding, self.required_false_check)

    @staticmethod
    def check(binding, callback):
        # Apply 'callback' to the binding and each nested child binding.
        current = binding
        while current is not None:
            callback(current)
            current = current.child_binding

    def get_yaml_bindings(self):
        """
        Returns a list of 'dts/bindings/**/*.yaml'
        """
        from glob import glob

        BINDINGS_PATH = 'dts/bindings/'

        # Collect every '<prefix>dts/bindings/' directory touched by the diff.
        diff_dirs = set()
        for file_name in get_files(filter='d'):
            if BINDINGS_PATH in file_name:
                head, sep, _ = file_name.partition(BINDINGS_PATH)
                diff_dirs.add(os.path.join(head, sep))

        # Expand each touched directory to all of its YAML binding files.
        yaml_files = []
        for bindings_dir in diff_dirs:
            yaml_files.extend(glob(f'{os.fspath(bindings_dir)}/**/*.yaml', recursive=True))

        parsed = edtlib.bindings_from_paths(yaml_files, ignore_errors=True)
        return list(diff_dirs), parsed

    def check_yaml_property_name(self, binding):
        """
        Checks if the property names in the binding file contain underscores.
        """
        for prop_name in binding.prop2specs:
            if '_' not in prop_name or prop_name in BINDINGS_PROPERTIES_AL:
                continue
            better_prop = prop_name.replace('_', '-')
            print(
                f"Required: In '{binding.path}', "
                f"the property '{prop_name}' "
                f"should be renamed to '{better_prop}'."
            )
            self.failure(
                f"{binding.path}: property '{prop_name}' contains underscores.\n"
                f"\tUse '{better_prop}' instead unless this property name is from Linux.\n"
                "Or another authoritative upstream source of bindings for "
                f"compatible '{binding.compatible}'.\n"
                "\tHint: update 'bindings_properties_allowlist.yaml' if you need to "
                "override this check for this property."
            )

    def required_false_check(self, binding):
        # 'required: false' is already the default and must not be spelled out.
        for prop_name, raw_prop in binding.raw.get('properties', {}).items():
            if raw_prop.get('required') is False:
                self.failure(
                    f'{binding.path}: property "{prop_name}": '
                    "'required: false' is redundant, please remove"
                )
529
530
class DevicetreeLintingCheck(ComplianceTest):
    """
    Checks if we are introducing syntax or formatting issues to devicetree files.
    """

    name = "DevicetreeLinting"
    doc = zephyr_doc_detail_builder("/contribute/style/devicetree.html")
    # Node.js package runner used to invoke the locally installed dts-linter.
    NPX_EXECUTABLE = "npx"

    def ensure_npx(self) -> bool:
        """Return True if npx and a local dts-linter install are usable.

        Side effect: stores the resolved npx path in self.npx_exe.
        """
        if not (npx_executable := shutil.which(self.NPX_EXECUTABLE)):
            return False
        try:
            self.npx_exe = npx_executable
            # --no prevents npx from fetching from registry
            subprocess.run(
                [self.npx_exe, "--prefix", "./scripts/ci", "--no", 'dts-linter', "--", "--version"],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                check=True,
                text=True,
            )
            return True
        except subprocess.CalledProcessError:
            return False

    def _parse_json_output(self, cmd, cwd=None):
        """Run command and parse single JSON output with issues array.

        Returns the decoded JSON object, or None if the command produced no
        output. Raises RuntimeError when the output is not valid JSON.
        Note: check=False, so a non-zero exit from the linter itself does
        not raise here.
        """
        result = subprocess.run(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            check=False,
            text=True,
            cwd=cwd or GIT_TOP,
        )

        if not result.stdout.strip():
            return None

        try:
            json_data = json.loads(result.stdout)
            return json_data
        except json.JSONDecodeError as e:
            raise RuntimeError(f"Failed to parse dts-linter JSON output: {e}") from e

    def _process_json_output(self, json_output: dict):
        """Turn each entry of the linter's 'issues' array into a result.

        'info'-level entries are only logged; anything else becomes a
        formatted failure with the issue's location data.
        """
        if "issues" not in json_output:
            return

        cwd = json_output.get("cwd", "")
        logging.info(f"Processing issues from: {cwd}")

        for issue in json_output["issues"]:
            level = issue.get("level", "unknown")
            message = issue.get("message", "")

            if level == "info":
                logging.info(message)
            else:
                title = issue.get("title", "")
                file = issue.get("file", "")
                line = issue.get("startLine", None)
                col = issue.get("startCol", None)
                end_line = issue.get("endLine", None)
                end_col = issue.get("endCol", None)
                self.fmtd_failure(level, title, file, line, col, message, end_line, end_col)

    def run(self):
        # Default to the bare command name; ensure_npx() replaces it with the
        # resolved absolute path when available.
        self.npx_exe = self.NPX_EXECUTABLE
        # Get changed DTS files
        dts_files = [
            file for file in get_files(filter="d") if file.endswith((".dts", ".dtsi", ".overlay"))
        ]

        if not self.ensure_npx():
            self.skip(
                'dts-linter not installed. To run this check, '
                'install Node.js and then run [npm --prefix ./scripts/ci ci] command inside '
                'ZEPHYR_BASE'
            )
        if not dts_files:
            self.skip('No DTS')

        temp_patch_files = []
        # Batch files to keep the linter's command line within OS limits.
        batch_size = 500

        for i in range(0, len(dts_files), batch_size):
            batch = dts_files[i : i + batch_size]

            # use a temporary file for each batch
            # NOTE(review): this is a relative path. The linter runs with
            # cwd=GIT_TOP (see _parse_json_output) while the merge/cleanup
            # below opens the same name relative to the process cwd —
            # presumably both are the same directory; confirm.
            temp_patch = f"dts_linter_{i}.patch"
            temp_patch_files.append(temp_patch)

            cmd = [
                self.npx_exe,
                "--prefix",
                "./scripts/ci",
                "--no",
                "dts-linter",
                "--",
                "--outputFormat",
                "json",
                "--format",
                "--patchFile",
                temp_patch,
            ]
            for file in batch:
                cmd.extend(["--file", file])

            try:
                json_output = self._parse_json_output(cmd)
                if json_output:
                    self._process_json_output(json_output)

            # NOTE(review): _parse_json_output uses check=False, so this
            # CalledProcessError branch looks unreachable from that call —
            # kept as a defensive fallback; confirm before removing.
            except subprocess.CalledProcessError as ex:
                stderr_output = ex.stderr if ex.stderr else ""
                if stderr_output.strip():
                    self.failure(f"dts-linter found issues:\n{stderr_output}")
                else:
                    err = "dts-linter failed with no output. "
                    err += "Make sure you install Node.js and then run "
                    err += "[npm --prefix ./scripts/ci ci] inside ZEPHYR_BASE"
                    self.failure(err)
            except RuntimeError as ex:
                self.failure(f"{ex}")

        # merge all temp patch files into one
        with open("dts_linter.patch", "wb") as final_patch:
            for patch in temp_patch_files:
                with open(patch, "rb") as f:
                    shutil.copyfileobj(f, final_patch)

        # cleanup
        for patch in temp_patch_files:
            os.remove(patch)
667
668
669class KconfigCheck(ComplianceTest):
670    """
671    Checks is we are introducing any new warnings/errors with Kconfig,
672    for example using undefined Kconfig variables.
673    """
674
675    name = "Kconfig"
676    doc = zephyr_doc_detail_builder("/build/kconfig/tips.html")
677
678    # Top-level Kconfig file. The path can be relative to srctree (ZEPHYR_BASE).
679    FILENAME = "Kconfig"
680
681    # Kconfig symbol prefix/namespace.
682    CONFIG_ = "CONFIG_"
683
    def run(self):
        # Parse the Kconfig tree once; every sub-check below reuses the same
        # kconfiglib object to avoid expensive reparsing.
        kconf = self.parse_kconfig()

        self.check_top_menu_not_too_long(kconf)
        self.check_no_pointless_menuconfigs(kconf)
        self.check_no_undef_within_kconfig(kconf)
        self.check_no_redefined_in_defconfig(kconf)
        self.check_no_enable_in_boolean_prompt(kconf)
        self.check_soc_name_sync(kconf)
        self.check_no_undef_outside_kconfig(kconf)
        self.check_disallowed_defconfigs(kconf)
695
696    def get_modules(self, _module_dirs_file, modules_file, sysbuild_modules_file, settings_file):
697        """
698        Get a list of modules and put them in a file that is parsed by
699        Kconfig
700
701        This is needed to complete Kconfig sanity tests.
702
703        """
704        # Invoke the script directly using the Python executable since this is
705        # not a module nor a pip-installed Python utility
706        zephyr_module_path = ZEPHYR_BASE / "scripts" / "zephyr_module.py"
707        cmd = [
708            sys.executable,
709            zephyr_module_path,
710            '--kconfig-out',
711            modules_file,
712            '--sysbuild-kconfig-out',
713            sysbuild_modules_file,
714            '--settings-out',
715            settings_file,
716        ]
717        try:
718            subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
719        except subprocess.CalledProcessError as ex:
720            self.error(ex.output.decode("utf-8"))
721
722        modules_dir = ZEPHYR_BASE / 'modules'
723        modules = [name for name in os.listdir(modules_dir) if modules_dir / name / 'Kconfig']
724
725        with open(modules_file) as fp_module_file:
726            content = fp_module_file.read()
727
728        with open(modules_file, 'w') as fp_module_file:
729            for module in modules:
730                fp_module_file.write(
731                    "ZEPHYR_{}_KCONFIG = {}\n".format(
732                        re.sub('[^a-zA-Z0-9]', '_', module).upper(),
733                        modules_dir / module / 'Kconfig',
734                    )
735                )
736            fp_module_file.write(content)
737
738    def get_kconfig_dts(self, kconfig_dts_file, settings_file):
739        """
740        Generate the Kconfig.dts using dts/bindings as the source.
741
742        This is needed to complete Kconfig compliance tests.
743
744        """
745        # Invoke the script directly using the Python executable since this is
746        # not a module nor a pip-installed Python utility
747        zephyr_drv_kconfig_path = ZEPHYR_BASE / "scripts" / "dts" / "gen_driver_kconfig_dts.py"
748        binding_paths = []
749        binding_paths.append(ZEPHYR_BASE / "dts" / "bindings")
750
751        dts_root_paths = get_module_setting_root('dts', settings_file)
752        for p in dts_root_paths:
753            binding_paths.append(p / "dts" / "bindings")
754
755        cmd = [
756            sys.executable,
757            zephyr_drv_kconfig_path,
758            '--kconfig-out',
759            kconfig_dts_file,
760            '--bindings-dirs',
761        ]
762        for binding_path in binding_paths:
763            cmd.append(binding_path)
764        try:
765            subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
766        except subprocess.CalledProcessError as ex:
767            self.error(ex.output.decode("utf-8"))
768
    def get_v2_model(self, kconfig_dir, settings_file):
        """
        Get lists of v2 boards and SoCs and put them in a file that is parsed by
        Kconfig

        This is needed to complete Kconfig sanity tests.

        kconfig_dir: directory where the generated 'boards', 'soc' and 'arch'
            Kconfig fragments are written.
        settings_file: Zephyr module settings file used to discover additional
            board/SoC roots.
        """
        # Select hardware model v2 and point Kconfig at the generated boards
        # directory; both are read by child processes via the environment.
        os.environ['HWM_SCHEME'] = 'v2'
        os.environ["KCONFIG_BOARD_DIR"] = os.path.join(kconfig_dir, 'boards')

        os.makedirs(os.path.join(kconfig_dir, 'boards'), exist_ok=True)
        os.makedirs(os.path.join(kconfig_dir, 'soc'), exist_ok=True)
        os.makedirs(os.path.join(kconfig_dir, 'arch'), exist_ok=True)

        kconfig_file = os.path.join(kconfig_dir, 'boards', 'Kconfig')
        kconfig_boards_file = os.path.join(kconfig_dir, 'boards', 'Kconfig.boards')
        kconfig_sysbuild_file = os.path.join(kconfig_dir, 'boards', 'Kconfig.sysbuild')
        kconfig_defconfig_file = os.path.join(kconfig_dir, 'boards', 'Kconfig.defconfig')

        # The main tree is prepended so it takes precedence over module roots.
        board_roots = get_module_setting_root('board', settings_file)
        board_roots.insert(0, ZEPHYR_BASE)
        soc_roots = get_module_setting_root('soc', settings_file)
        soc_roots.insert(0, ZEPHYR_BASE)
        root_args = argparse.Namespace(
            **{'board_roots': board_roots, 'soc_roots': soc_roots, 'board': None, 'board_dir': []}
        )
        v2_boards = list_boards.find_v2_boards(root_args).values()

        # 'osource' makes the per-board fragments optional: not every board
        # ships Kconfig.defconfig / Kconfig.sysbuild / Kconfig.
        with open(kconfig_defconfig_file, 'w') as fp:
            for board in v2_boards:
                for board_dir in board.directories:
                    fp.write('osource "' + (board_dir / 'Kconfig.defconfig').as_posix() + '"\n')

        with open(kconfig_sysbuild_file, 'w') as fp:
            for board in v2_boards:
                for board_dir in board.directories:
                    fp.write('osource "' + (board_dir / 'Kconfig.sysbuild').as_posix() + '"\n')

        # Declare one BOARD_* bool symbol per board and per board qualifier,
        # then source the board's own Kconfig.<name> file (mandatory).
        with open(kconfig_boards_file, 'w') as fp:
            for board in v2_boards:
                board_str = 'BOARD_' + re.sub(r"[^a-zA-Z0-9_]", "_", board.name).upper()
                fp.write('config  ' + board_str + '\n')
                fp.write('\t bool\n')
                for qualifier in list_boards.board_v2_qualifiers(board):
                    board_str = (
                        'BOARD_' + board.name + '_' + re.sub(r"[^a-zA-Z0-9_]", "_", qualifier)
                    ).upper()
                    fp.write('config  ' + board_str + '\n')
                    fp.write('\t bool\n')
                for board_dir in board.directories:
                    fp.write(
                        'source "' + (board_dir / ('Kconfig.' + board.name)).as_posix() + '"\n'
                    )

        with open(kconfig_file, 'w') as fp:
            for board in v2_boards:
                for board_dir in board.directories:
                    fp.write('osource "' + (board_dir / 'Kconfig').as_posix() + '"\n')

        # Repeat the same generation scheme for SoCs, reusing the local
        # file-path variables.
        kconfig_defconfig_file = os.path.join(kconfig_dir, 'soc', 'Kconfig.defconfig')
        kconfig_sysbuild_file = os.path.join(kconfig_dir, 'soc', 'Kconfig.sysbuild')
        kconfig_soc_file = os.path.join(kconfig_dir, 'soc', 'Kconfig.soc')
        kconfig_file = os.path.join(kconfig_dir, 'soc', 'Kconfig')

        root_args = argparse.Namespace(**{'soc_roots': soc_roots})
        v2_systems = list_hardware.find_v2_systems(root_args)

        soc_folders = {folder for soc in v2_systems.get_socs() for folder in soc.folder}
        with open(kconfig_defconfig_file, 'w') as fp:
            for folder in soc_folders:
                fp.write('osource "' + (Path(folder) / 'Kconfig.defconfig').as_posix() + '"\n')

        with open(kconfig_sysbuild_file, 'w') as fp:
            for folder in soc_folders:
                fp.write('osource "' + (Path(folder) / 'Kconfig.sysbuild').as_posix() + '"\n')

        with open(kconfig_soc_file, 'w') as fp:
            for folder in soc_folders:
                fp.write('source "' + (Path(folder) / 'Kconfig.soc').as_posix() + '"\n')

        with open(kconfig_file, 'w') as fp:
            for folder in soc_folders:
                fp.write('source "' + (Path(folder) / 'Kconfig').as_posix() + '"\n')

        # Finally, source every architecture's Kconfig from the main tree.
        kconfig_file = os.path.join(kconfig_dir, 'arch', 'Kconfig')

        root_args = argparse.Namespace(**{'arch_roots': [ZEPHYR_BASE], 'arch': None})
        v2_archs = list_hardware.find_v2_archs(root_args)

        with open(kconfig_file, 'w') as fp:
            for arch in v2_archs['archs']:
                fp.write('source "' + (Path(arch['path']) / 'Kconfig').as_posix() + '"\n')
861
862    def parse_kconfig(self):
863        """
864        Returns a kconfiglib.Kconfig object for the Kconfig files. We reuse
865        this object for all tests to avoid having to reparse for each test.
866        """
867        # Put the Kconfiglib path first to make sure no local Kconfiglib version is
868        # used
869        kconfig_path = ZEPHYR_BASE / "scripts" / "kconfig"
870        if not kconfig_path.exists():
871            self.error(kconfig_path + " not found")
872
873        kconfiglib_dir = tempfile.mkdtemp(prefix="kconfiglib_")
874
875        sys.path.insert(0, str(kconfig_path))
876        # Import globally so that e.g. kconfiglib.Symbol can be referenced in
877        # tests
878        global kconfiglib
879        import kconfiglib
880
881        # Look up Kconfig files relative to ZEPHYR_BASE
882        os.environ["srctree"] = str(ZEPHYR_BASE)  # noqa: SIM112
883
884        # Parse the entire Kconfig tree, to make sure we see all symbols
885        os.environ["SOC_DIR"] = "soc/"
886        os.environ["ARCH_DIR"] = "arch/"
887        os.environ["BOARD"] = "boards"
888        os.environ["ARCH"] = "*"
889        os.environ["KCONFIG_BINARY_DIR"] = kconfiglib_dir
890        os.environ['DEVICETREE_CONF'] = "dummy"
891        os.environ['TOOLCHAIN_HAS_NEWLIB'] = "y"
892        kconfig_env_file = os.path.join(kconfiglib_dir, "kconfig_module_dirs.env")
893
894        # Older name for DEVICETREE_CONF, for compatibility with older Zephyr
895        # versions that don't have the renaming
896        os.environ["GENERATED_DTS_BOARD_CONF"] = "dummy"
897
898        # For multi repo support
899        self.get_modules(
900            kconfig_env_file,
901            os.path.join(kconfiglib_dir, "Kconfig.modules"),
902            os.path.join(kconfiglib_dir, "Kconfig.sysbuild.modules"),
903            os.path.join(kconfiglib_dir, "settings_file.txt"),
904        )
905        # For Kconfig.dts support
906        self.get_kconfig_dts(
907            os.path.join(kconfiglib_dir, "Kconfig.dts"),
908            os.path.join(kconfiglib_dir, "settings_file.txt"),
909        )
910        # For hardware model support (board, soc, arch)
911        self.get_v2_model(kconfiglib_dir, os.path.join(kconfiglib_dir, "settings_file.txt"))
912
913        # Tells Kconfiglib to generate warnings for all references to undefined
914        # symbols within Kconfig files
915        os.environ["KCONFIG_WARN_UNDEF"] = "y"
916
917        load_dotenv(kconfig_env_file)
918
919        try:
920            # Note this will both print warnings to stderr _and_ return
921            # them: so some warnings might get printed
922            # twice. "warn_to_stderr=False" could unfortunately cause
923            # some (other) warnings to never be printed.
924            return kconfiglib.Kconfig(filename=self.FILENAME)
925        except kconfiglib.KconfigError as e:
926            self.failure(str(e))
927            raise EndTest from e
928        finally:
929            # Clean up the temporary directory
930            shutil.rmtree(kconfiglib_dir)
931
932    def module_kconfigs(self, regex):
933        manifest = Manifest.from_file()
934        kconfigs = ""
935
936        # Use hard coded paths for Zephyr for tests, samples and ext. module root
937        tmp_output = git(
938            "grep",
939            "-I",
940            "-h",
941            "--perl-regexp",
942            regex,
943            "--",
944            ":tests",
945            ":samples",
946            ":modules",
947            cwd=ZEPHYR_BASE,
948            ignore_non_zero=True,
949        )
950
951        if len(tmp_output) > 0:
952            kconfigs += tmp_output + "\n"
953
954        for project in manifest.get_projects([]):
955            if not manifest.is_active(project):
956                continue
957
958            if not project.is_cloned():
959                continue
960
961            module_path = PurePath(project.abspath)
962            module_yml = module_path.joinpath('zephyr/module.yml')
963
964            if not Path(module_yml).is_file():
965                module_yml = module_path.joinpath('zephyr/module.yaml')
966
967            if Path(module_yml).is_file():
968                dirs = []
969
970                with Path(module_yml).open('r', encoding='utf-8') as f:
971                    meta = yaml.load(f.read(), Loader=SafeLoader)
972
973                for folder_type in ['samples', 'tests']:
974                    if folder_type in meta:
975                        for path_ext in meta[folder_type]:
976                            path_full = module_path.joinpath(path_ext)
977
978                            if Path(path_full).is_dir():
979                                dirs.append(":" + path_ext)
980
981                # Add ext. module root, if one is defined
982                if (
983                    'build' in meta
984                    and 'settings' in meta['build']
985                    and 'module_ext_root' in meta['build']['settings']
986                ):
987                    path_full = module_path.joinpath(meta['build']['settings']['module_ext_root'])
988
989                    if Path(path_full).is_dir():
990                        dirs.append(":" + meta['build']['settings']['module_ext_root'])
991
992                if len(dirs) > 0:
993                    tmp_output = git(
994                        "grep",
995                        "-I",
996                        "-h",
997                        "--perl-regexp",
998                        regex,
999                        "--",
1000                        *dirs,
1001                        cwd=module_path,
1002                        ignore_non_zero=True,
1003                    )
1004
1005                    if len(tmp_output) > 0:
1006                        kconfigs += tmp_output + "\n"
1007
1008        return kconfigs
1009
1010    def get_logging_syms(self, kconf):
1011        # Returns a set() with the names of the Kconfig symbols generated with
1012        # logging template in samples/tests folders. The Kconfig symbols doesn't
1013        # include `CONFIG_` and for each module declared there is one symbol
1014        # per suffix created.
1015
1016        suffixes = [
1017            "_LOG_LEVEL",
1018            "_LOG_LEVEL_DBG",
1019            "_LOG_LEVEL_ERR",
1020            "_LOG_LEVEL_INF",
1021            "_LOG_LEVEL_WRN",
1022            "_LOG_LEVEL_OFF",
1023            "_LOG_LEVEL_INHERIT",
1024            "_LOG_LEVEL_DEFAULT",
1025        ]
1026
1027        # Warning: Needs to work with both --perl-regexp and the 're' module.
1028        regex = r"^\s*(?:module\s*=\s*)([A-Z0-9_]+)\s*(?:#|$)"
1029
1030        # Grep samples/ and tests/ for symbol definitions in all modules
1031        grep_stdout = self.module_kconfigs(regex)
1032
1033        names = re.findall(regex, grep_stdout, re.MULTILINE)
1034
1035        kconf_syms = []
1036        for name in names:
1037            for suffix in suffixes:
1038                kconf_syms.append(f"{name}{suffix}")
1039
1040        return set(kconf_syms)
1041
1042    def module_disallowed_check(self, module_path, type, folder, meta, regex):
1043        # Returns a list with lines from git grep which includes Kconfigs from defconfig files
1044        entry = type + '_root'
1045        git_folder = ":" + folder
1046
1047        if entry in meta['build']['settings']:
1048            tmp_path = module_path.joinpath(meta['build']['settings'][entry])
1049
1050            if Path(tmp_path.joinpath(folder)).is_dir():
1051                tmp_output = git(
1052                    "grep",
1053                    "--line-number",
1054                    "-I",
1055                    "--null",
1056                    "--perl-regexp",
1057                    regex,
1058                    "--",
1059                    git_folder,
1060                    cwd=tmp_path,
1061                    ignore_non_zero=True,
1062                )
1063
1064                if len(tmp_output) > 0:
1065                    return tmp_output.splitlines()
1066        return []
1067
1068    def check_disallowed_defconfigs(self, kconf):
1069        """
1070        Checks that there are no disallowed Kconfigs used in board/SoC defconfig files
1071        """
1072        # Grep for symbol references.
1073        #
1074        # Example output line for a reference to CONFIG_FOO at line 17 of
1075        # foo/bar.c:
1076        #
1077        #   foo/bar.c<null>17<null>#ifdef CONFIG_FOO
1078        #
1079        # 'git grep --only-matching' would get rid of the surrounding context
1080        # ('#ifdef '), but it was added fairly recently (second half of 2018),
1081        # so we extract the references from each line ourselves instead.
1082        #
1083        # The regex uses word boundaries (\b) to isolate the reference, and
1084        # negative lookahead to automatically allowlist the following:
1085        #
1086        #  - ##, for token pasting (CONFIG_FOO_##X)
1087        #
1088        #  - $, e.g. for CMake variable expansion (CONFIG_FOO_${VAR})
1089        #
1090        #  - @, e.g. for CMakes's configure_file() (CONFIG_FOO_@VAR@)
1091        #
1092        #  - {, e.g. for Python scripts ("CONFIG_FOO_{}_BAR".format(...)")
1093        #
1094        #  - *, meant for comments like '#endif /* CONFIG_FOO_* */
1095
1096        disallowed_symbols = {
1097            "PINCTRL": "Drivers requiring PINCTRL must SELECT it instead.",
1098            "BOARD_EARLY_INIT_HOOK": "Boards requiring hooks must SELECT them instead.",
1099            "BOARD_LATE_INIT_HOOK": "Boards requiring hooks must SELECT them instead.",
1100        }
1101
1102        disallowed_regex = "(" + "|".join(disallowed_symbols.keys()) + ")$"
1103
1104        # Warning: Needs to work with both --perl-regexp and the 're' module
1105        # Windows
1106        if os.name == 'nt':
1107            # Remove word boundaries on Windows implementation
1108            regex_boards = r"CONFIG_[A-Z0-9_]+(?!\s*##|[$@{(.*])"
1109            regex_socs = r"config[ \t]+[A-Z0-9_]+"
1110        else:
1111            regex_boards = r"\bCONFIG_[A-Z0-9_]+\b(?!\s*##|[$@{(.*])"
1112            regex_socs = r"\bconfig\s+[A-Z0-9_]+$"
1113
1114        grep_stdout_boards = git(
1115            "grep",
1116            "--line-number",
1117            "-I",
1118            "--null",
1119            "--perl-regexp",
1120            regex_boards,
1121            "--",
1122            ":boards",
1123            cwd=ZEPHYR_BASE,
1124        ).splitlines()
1125        grep_stdout_socs = git(
1126            "grep",
1127            "--line-number",
1128            "-I",
1129            "--null",
1130            "--perl-regexp",
1131            regex_socs,
1132            "--",
1133            ":soc",
1134            cwd=ZEPHYR_BASE,
1135        ).splitlines()
1136
1137        manifest = Manifest.from_file()
1138        for project in manifest.get_projects([]):
1139            if not manifest.is_active(project):
1140                continue
1141
1142            if not project.is_cloned():
1143                continue
1144
1145            module_path = PurePath(project.abspath)
1146            module_yml = module_path.joinpath('zephyr/module.yml')
1147
1148            if not Path(module_yml).is_file():
1149                module_yml = module_path.joinpath('zephyr/module.yaml')
1150
1151            if Path(module_yml).is_file():
1152                with Path(module_yml).open('r', encoding='utf-8') as f:
1153                    meta = yaml.load(f.read(), Loader=SafeLoader)
1154
1155                    if 'build' in meta and 'settings' in meta['build']:
1156                        grep_stdout_boards.extend(
1157                            self.module_disallowed_check(
1158                                module_path, 'board', 'boards', meta, regex_boards
1159                            )
1160                        )
1161                        grep_stdout_socs.extend(
1162                            self.module_disallowed_check(
1163                                module_path, 'soc', 'soc', meta, regex_socs
1164                            )
1165                        )
1166
1167        # Board processing
1168        # splitlines() supports various line terminators
1169        for grep_line in grep_stdout_boards:
1170            path, lineno, line = grep_line.split("\0")
1171
1172            # Extract symbol references (might be more than one) within the line
1173            for sym_name in re.findall(regex_boards, line):
1174                sym_name = sym_name[len("CONFIG_") :]
1175                # Only check in Kconfig fragment files, references might exist in documentation
1176                if re.match(disallowed_regex, sym_name) and (
1177                    path[-len("conf") :] == "conf" or path[-len("defconfig") :] == "defconfig"
1178                ):
1179                    reason = disallowed_symbols.get(sym_name)
1180                    self.fmtd_failure(
1181                        "error",
1182                        "BoardDisallowedKconfigs",
1183                        path,
1184                        lineno,
1185                        desc=f"""
1186Found disallowed Kconfig symbol in board Kconfig files: CONFIG_{sym_name:35}
1187{reason}
1188""",
1189                    )
1190
1191        # SoCs processing
1192        # splitlines() supports various line terminators
1193        for grep_line in grep_stdout_socs:
1194            path, lineno, line = grep_line.split("\0")
1195
1196            # Extract symbol references (might be more than one) within the line
1197            for sym_name in re.findall(regex_socs, line):
1198                sym_name = sym_name[len("config") :].strip()
1199                # Only check in Kconfig defconfig files
1200                if re.match(disallowed_regex, sym_name) and "defconfig" in path:
1201                    reason = disallowed_symbols.get(sym_name, "Unknown reason")
1202                    self.fmtd_failure(
1203                        "error",
1204                        "SoCDisallowedKconfigs",
1205                        path,
1206                        lineno,
1207                        desc=f"""
1208Found disallowed Kconfig symbol in SoC Kconfig files: {sym_name:35}
1209{reason}
1210""",
1211                    )
1212
1213    def get_defined_syms(self, kconf):
1214        # Returns a set() with the names of all defined Kconfig symbols (with no
1215        # 'CONFIG_' prefix). This is complicated by samples and tests defining
1216        # their own Kconfig trees. For those, just grep for 'config FOO' to find
1217        # definitions. Doing it "properly" with Kconfiglib is still useful for
1218        # the main tree, because some symbols are defined using preprocessor
1219        # macros.
1220
1221        # Warning: Needs to work with both --perl-regexp and the 're' module.
1222        # (?:...) is a non-capturing group.
1223        regex = r"^\s*(?:menu)?config\s*([A-Z0-9_]+)\s*(?:#|$)"
1224
1225        # Grep samples/ and tests/ for symbol definitions in all modules
1226        grep_stdout = self.module_kconfigs(regex)
1227
1228        # Generate combined list of configs and choices from the main Kconfig tree.
1229        kconf_syms = kconf.unique_defined_syms + kconf.unique_choices
1230
1231        # Symbols from the main Kconfig tree + grepped definitions from samples
1232        # and tests
1233        return set(
1234            [sym.name for sym in kconf_syms] + re.findall(regex, grep_stdout, re.MULTILINE)
1235        ).union(self.get_logging_syms(kconf))
1236
1237    def check_top_menu_not_too_long(self, kconf):
1238        """
1239        Checks that there aren't too many items in the top-level menu (which
1240        might be a sign that stuff accidentally got added there)
1241        """
1242        max_top_items = 50
1243
1244        n_top_items = 0
1245        node = kconf.top_node.list
1246        while node:
1247            # Only count items with prompts. Other items will never be
1248            # shown in the menuconfig (outside show-all mode).
1249            if node.prompt:
1250                n_top_items += 1
1251            node = node.next
1252
1253        if n_top_items > max_top_items:
1254            self.failure(f"""
1255Expected no more than {max_top_items} potentially visible items (items with
1256prompts) in the top-level Kconfig menu, found {n_top_items} items. If you're
1257deliberately adding new entries, then bump the 'max_top_items' variable in
1258{__file__}.""")
1259
1260    def check_no_redefined_in_defconfig(self, kconf):
1261        # Checks that no symbols are (re)defined in defconfigs.
1262
1263        for node in kconf.node_iter():
1264            # 'kconfiglib' is global
1265            # pylint: disable=undefined-variable
1266            if "defconfig" in node.filename and (node.prompt or node.help):
1267                name = (
1268                    node.item.name
1269                    if node.item not in (kconfiglib.MENU, kconfiglib.COMMENT)
1270                    else str(node)
1271                )
1272                self.failure(f"""
1273Kconfig node '{name}' found with prompt or help in {node.filename}.
1274Options must not be defined in defconfig files.
1275""")
1276                continue
1277
1278    def check_no_enable_in_boolean_prompt(self, kconf):
1279        # Checks that boolean's prompt does not start with "Enable...".
1280
1281        for node in kconf.node_iter():
1282            # skip Kconfig nodes not in-tree (will present an absolute path)
1283            if os.path.isabs(node.filename):
1284                continue
1285
1286            # 'kconfiglib' is global
1287            # pylint: disable=undefined-variable
1288
1289            # only process boolean symbols with a prompt
1290            if (
1291                not isinstance(node.item, kconfiglib.Symbol)
1292                or node.item.type != kconfiglib.BOOL
1293                or not node.prompt
1294                or not node.prompt[0]
1295            ):
1296                continue
1297
1298            if re.match(r"^[Ee]nable.*", node.prompt[0]):
1299                self.failure(f"""
1300Boolean option '{node.item.name}' prompt must not start with 'Enable...'. Please
1301check Kconfig guidelines.
1302""")
1303                continue
1304
1305    def check_no_pointless_menuconfigs(self, kconf):
1306        # Checks that there are no pointless 'menuconfig' symbols without
1307        # children in the Kconfig files
1308
1309        bad_mconfs = []
1310        for node in kconf.node_iter():
1311            # 'kconfiglib' is global
1312            # pylint: disable=undefined-variable
1313
1314            # Avoid flagging empty regular menus and choices, in case people do
1315            # something with 'osource' (could happen for 'menuconfig' symbols
1316            # too, though it's less likely)
1317            if node.is_menuconfig and not node.list and isinstance(node.item, kconfiglib.Symbol):
1318                bad_mconfs.append(node)
1319
1320        if bad_mconfs:
1321            self.failure(
1322                """\
1323Found pointless 'menuconfig' symbols without children. Use regular 'config'
1324symbols instead. See
1325https://docs.zephyrproject.org/latest/build/kconfig/tips.html#menuconfig-symbols.
1326
1327"""
1328                + "\n".join(
1329                    f"{node.item.name:35} {node.filename}:{node.linenr}" for node in bad_mconfs
1330                )
1331            )
1332
1333    def check_no_undef_within_kconfig(self, kconf):
1334        """
1335        Checks that there are no references to undefined Kconfig symbols within
1336        the Kconfig files
1337        """
1338        undef_ref_warnings = "\n\n\n".join(
1339            warning for warning in kconf.warnings if "undefined symbol" in warning
1340        )
1341
1342        if undef_ref_warnings:
1343            self.failure(f"Undefined Kconfig symbols:\n\n {undef_ref_warnings}")
1344
1345    def check_soc_name_sync(self, kconf):
1346        root_args = argparse.Namespace(**{'soc_roots': [ZEPHYR_BASE]})
1347        v2_systems = list_hardware.find_v2_systems(root_args)
1348
1349        soc_names = {soc.name for soc in v2_systems.get_socs()}
1350
1351        soc_kconfig_names = set()
1352        for node in kconf.node_iter():
1353            # 'kconfiglib' is global
1354            # pylint: disable=undefined-variable
1355            if isinstance(node.item, kconfiglib.Symbol) and node.item.name == "SOC":
1356                n = node.item
1357                for d in n.defaults:
1358                    soc_kconfig_names.add(d[0].name)
1359
1360        soc_name_warnings = []
1361        for name in soc_names:
1362            if name not in soc_kconfig_names:
1363                soc_name_warnings.append(f"soc name: {name} not found in CONFIG_SOC defaults.")
1364
1365        if soc_name_warnings:
1366            soc_name_warning_str = '\n'.join(soc_name_warnings)
1367            self.failure(f'''
1368Missing SoC names or CONFIG_SOC vs soc.yml out of sync:
1369
1370{soc_name_warning_str}
1371''')
1372
1373    def check_no_undef_outside_kconfig(self, kconf):
1374        """
1375        Checks that there are no references to undefined Kconfig symbols
1376        outside Kconfig files (any CONFIG_FOO where no FOO symbol exists)
1377        """
1378        # Grep for symbol references.
1379        #
1380        # Example output line for a reference to CONFIG_FOO at line 17 of
1381        # foo/bar.c:
1382        #
1383        #   foo/bar.c<null>17<null>#ifdef CONFIG_FOO
1384        #
1385        # 'git grep --only-matching' would get rid of the surrounding context
1386        # ('#ifdef '), but it was added fairly recently (second half of 2018),
1387        # so we extract the references from each line ourselves instead.
1388        #
1389        # The regex uses word boundaries (\b) to isolate the reference, and
1390        # negative lookahead to automatically allowlist the following:
1391        #
1392        #  - ##, for token pasting (CONFIG_FOO_##X)
1393        #
1394        #  - $, e.g. for CMake variable expansion (CONFIG_FOO_${VAR})
1395        #
1396        #  - @, e.g. for CMakes's configure_file() (CONFIG_FOO_@VAR@)
1397        #
1398        #  - {, e.g. for Python scripts ("CONFIG_FOO_{}_BAR".format(...)")
1399        #
1400        #  - *, meant for comments like '#endif /* CONFIG_FOO_* */
1401
1402        defined_syms = self.get_defined_syms(kconf)
1403
1404        # Maps each undefined symbol to a list <filename>:<linenr> strings
1405        undef_to_locs = collections.defaultdict(list)
1406
1407        # Warning: Needs to work with both --perl-regexp and the 're' module
1408        regex = r"\b" + self.CONFIG_ + r"[A-Z0-9_]+\b(?!\s*##|[$@{(.*])"
1409
1410        # Skip doc/releases and doc/security/vulnerabilities.rst, which often
1411        # reference removed symbols
1412        grep_stdout = git(
1413            "grep",
1414            "--line-number",
1415            "-I",
1416            "--null",
1417            "--perl-regexp",
1418            regex,
1419            "--",
1420            ":!/doc/releases",
1421            ":!/doc/security/vulnerabilities.rst",
1422            cwd=GIT_TOP,
1423        )
1424
1425        # splitlines() supports various line terminators
1426        for grep_line in grep_stdout.splitlines():
1427            path, lineno, line = grep_line.split("\0")
1428
1429            # Extract symbol references (might be more than one) within the
1430            # line
1431            for sym_name in re.findall(regex, line):
1432                sym_name = sym_name[len(self.CONFIG_) :]  # Strip CONFIG_
1433                if (
1434                    sym_name not in defined_syms
1435                    and sym_name not in self.UNDEF_KCONFIG_ALLOWLIST
1436                    and not (sym_name.endswith("_MODULE") and sym_name[:-7] in defined_syms)
1437                    and not sym_name.startswith("BOARD_REVISION_")
1438                    and not (sym_name.startswith("DT_HAS_") and sym_name.endswith("_ENABLED"))
1439                ):
1440                    undef_to_locs[sym_name].append(f"{path}:{lineno}")
1441
1442        if not undef_to_locs:
1443            return
1444
1445        # String that describes all referenced but undefined Kconfig symbols,
1446        # in alphabetical order, along with the locations where they're
1447        # referenced. Example:
1448        #
1449        #   CONFIG_ALSO_MISSING    arch/xtensa/core/fatal.c:273
1450        #   CONFIG_MISSING         arch/xtensa/core/fatal.c:264, subsys/fb/cfb.c:20
1451        undef_desc = "\n".join(
1452            f"{self.CONFIG_}{sym_name:35} {', '.join(locs)}"
1453            for sym_name, locs in sorted(undef_to_locs.items())
1454        )
1455
1456        self.failure(f"""
1457Found references to undefined Kconfig symbols. If any of these are false
1458positives, then add them to UNDEF_KCONFIG_ALLOWLIST in {__file__}.
1459
1460If the reference is for a comment like /* CONFIG_FOO_* */ (or
1461/* CONFIG_FOO_*_... */), then please use exactly that form (with the '*'). The
1462CI check knows not to flag it.
1463
1464More generally, a reference followed by $, @, {{, (, ., *, or ## will never be
1465flagged.
1466
1467{undef_desc}""")
1468
1469    # Many of these are symbols used as examples. Note that the list is sorted
1470    # alphabetically, and skips the CONFIG_ prefix.
1471    UNDEF_KCONFIG_ALLOWLIST = {
1472        # zephyr-keep-sorted-start re(^\s+")
1473        "ALSO_MISSING",
1474        "APP_LINK_WITH_",
1475        # Application log level is not detected correctly as
1476        # the option is defined using a template, so it can't
1477        # be grepped
1478        "APP_LOG_LEVEL",
1479        "APP_LOG_LEVEL_DBG",
1480        # The ARMCLANG_STD_LIBC is defined in the
1481        # toolchain Kconfig which is sourced based on
1482        # Zephyr toolchain variant and therefore not
1483        # visible to compliance.
1484        "ARMCLANG_STD_LIBC",
1485        "BINDESC_",  # Used in documentation as a prefix
1486        "BOARD_",  # Used as regex in scripts/utils/board_v1_to_v2.py
1487        "BOARD_MPS2_AN521_CPUTEST",  # Used for board and SoC extension feature tests
1488        "BOARD_NATIVE_SIM_NATIVE_64_TWO",  # Used for board and SoC extension feature tests
1489        "BOARD_NATIVE_SIM_NATIVE_ONE",  # Used for board and SoC extension feature tests
1490        "BOARD_UNIT_TESTING",  # Used for tests/unit
1491        "BOOT_DIRECT_XIP",  # Used in sysbuild for MCUboot configuration
1492        "BOOT_DIRECT_XIP_REVERT",  # Used in sysbuild for MCUboot configuration
1493        "BOOT_ENCRYPTION_KEY_FILE",  # Used in sysbuild
1494        "BOOT_ENCRYPT_ALG_AES_128",  # Used in sysbuild
1495        "BOOT_ENCRYPT_ALG_AES_256",  # Used in sysbuild
1496        "BOOT_ENCRYPT_IMAGE",  # Used in sysbuild
1497        "BOOT_FIRMWARE_LOADER",  # Used in sysbuild for MCUboot configuration
1498        "BOOT_FIRMWARE_LOADER_BOOT_MODE",  # Used in sysbuild for MCUboot configuration
1499        "BOOT_IMAGE_EXECUTABLE_RAM_SIZE",  # MCUboot setting
1500        "BOOT_IMAGE_EXECUTABLE_RAM_START",  # MCUboot setting
1501        "BOOT_MAX_IMG_SECTORS_AUTO",  # Used in sysbuild
1502        "BOOT_RAM_LOAD",  # Used in sysbuild for MCUboot configuration
1503        "BOOT_RAM_LOAD_REVERT",  # Used in sysbuild for MCUboot configuration
1504        "BOOT_SERIAL_BOOT_MODE",  # Used in (sysbuild-based) test/documentation
1505        "BOOT_SERIAL_CDC_ACM",  # Used in (sysbuild-based) test
1506        "BOOT_SERIAL_ENTRANCE_GPIO",  # Used in (sysbuild-based) test
1507        "BOOT_SERIAL_IMG_GRP_HASH",  # Used in documentation
1508        "BOOT_SERIAL_UART",  # Used in (sysbuild-based) test
1509        "BOOT_SHARE_BACKEND_RETENTION",  # Used in Kconfig text
1510        "BOOT_SHARE_DATA",  # Used in Kconfig text
1511        "BOOT_SHARE_DATA_BOOTINFO",  # Used in (sysbuild-based) test
1512        "BOOT_SIGNATURE_KEY_FILE",  # MCUboot setting used by sysbuild
1513        "BOOT_SIGNATURE_TYPE_ECDSA_P256",  # MCUboot setting used by sysbuild
1514        "BOOT_SIGNATURE_TYPE_ED25519",  # MCUboot setting used by sysbuild
1515        "BOOT_SIGNATURE_TYPE_NONE",  # MCUboot setting used by sysbuild
1516        "BOOT_SIGNATURE_TYPE_RSA",  # MCUboot setting used by sysbuild
1517        "BOOT_SWAP_USING_MOVE",  # Used in sysbuild for MCUboot configuration
1518        "BOOT_SWAP_USING_OFFSET",  # Used in sysbuild for MCUboot configuration
1519        "BOOT_SWAP_USING_SCRATCH",  # Used in sysbuild for MCUboot configuration
1520        # Used in example adjusting MCUboot config, but
1521        # symbol is defined in MCUboot itself.
1522        "BOOT_UPGRADE_ONLY",
1523        "BOOT_VALIDATE_SLOT0",  # Used in (sysbuild-based) test
1524        "BOOT_WATCHDOG_FEED",  # Used in (sysbuild-based) test
1525        "BT_6LOWPAN",  # Defined in Linux, mentioned in docs
1526        "CDC_ACM_PORT_NAME_",
1527        "CHRE",  # Optional module
1528        "CHRE_LOG_LEVEL_DBG",  # Optional module
1529        "CLOCK_STM32_SYSCLK_SRC_",
1530        "CMD_CACHE",  # Defined in U-Boot, mentioned in docs
1531        "CMU",
1532        "COMPILER_RT_RTLIB",
1533        "CRC",  # Used in TI CC13x2 / CC26x2 SDK comment
1534        "DEEP_SLEEP",  # #defined by RV32M1 in ext/
1535        "DESCRIPTION",
1536        "ERR",
1537        "ESP_DIF_LIBRARY",  # Referenced in CMake comment
1538        "EXPERIMENTAL",
1539        "EXTRA_FIRMWARE_DIR",  # Linux, in boards/xtensa/intel_adsp_cavs25/doc
1540        "FFT",  # Used as an example in cmake/extensions.cmake
1541        "FLAG",  # Used as an example
1542        "FOO",
1543        "FOO_LOG_LEVEL",
1544        "FOO_SETTING_1",
1545        "FOO_SETTING_2",
1546        "HEAP_MEM_POOL_ADD_SIZE_",  # Used as an option matching prefix
1547        "HUGETLBFS",  # Linux, in boards/xtensa/intel_adsp_cavs25/doc
1548        "IAR_BUFFERED_WRITE",
1549        "IAR_DATA_INIT",
1550        "IAR_LIBCPP",
1551        "IAR_SEMIHOSTING",
1552        "IAR_ZEPHYR_INIT",
1553        # Used in ICMsg tests for intercompatibility
1554        # with older versions of the ICMsg.
1555        "IPC_SERVICE_ICMSG_BOND_NOTIFY_REPEAT_TO_MS",
1556        "LIBGCC_RTLIB",
1557        "LLVM_USE_LD",  # Both LLVM_USE_* are in cmake/toolchain/llvm/Kconfig
1558        # which are only included if LLVM is selected but
1559        # not other toolchains. Compliance check would complain,
1560        # for example, if you are using GCC.
1561        "LLVM_USE_LLD",
1562        "LOG_BACKEND_MOCK_OUTPUT_DEFAULT",  # Referenced in tests/subsys/logging/log_syst
1563        "LOG_BACKEND_MOCK_OUTPUT_SYST",  # Referenced in testcase.yaml of log_syst test
1564        "LSM6DSO_INT_PIN",
1565        "MCUBOOT_ACTION_HOOKS",  # Used in (sysbuild-based) test
1566        "MCUBOOT_CLEANUP_ARM_CORE",  # Used in (sysbuild-based) test
1567        "MCUBOOT_DOWNGRADE_PREVENTION",  # but symbols are defined in MCUboot itself.
1568        "MCUBOOT_LOG_LEVEL_DBG",
1569        "MCUBOOT_LOG_LEVEL_INF",
1570        "MCUBOOT_LOG_LEVEL_WRN",  # Used in example adjusting MCUboot config,
1571        "MCUBOOT_SERIAL",  # Used in (sysbuild-based) test/documentation
1572        "MCUMGR_GRP_EXAMPLE_OTHER_HOOK",  # Used in documentation
1573        # Used in modules/hal_nxp/mcux/mcux-sdk-ng/device/device.cmake.
1574        # It is a variable used by MCUX SDK CMake.
1575        "MCUX_HW_CORE",
1576        # Used in modules/hal_nxp/mcux/mcux-sdk-ng/device/device.cmake.
1577        # It is a variable used by MCUX SDK CMake.
1578        "MCUX_HW_DEVICE_CORE",
1579        # Used in modules/hal_nxp/mcux/mcux-sdk-ng/device/device.cmake.
1580        # It is a variable used by MCUX SDK CMake.
1581        "MCUX_HW_FPU_TYPE",
1582        "MISSING",
1583        "MODULES",
1584        "MODVERSIONS",  # Linux, in boards/xtensa/intel_adsp_cavs25/doc
1585        "MYFEATURE",
1586        "MY_DRIVER_0",
1587        "NORMAL_SLEEP",  # #defined by RV32M1 in ext/
1588        "NRF_WIFI_FW_BIN",  # Directly passed from CMakeLists.txt
1589        "OPT",
1590        "OPT_0",
1591        "PEDO_THS_MIN",
1592        "PSA_H",  # This is used in config-psa.h as guard for the header file
1593        "REG1",
1594        "REG2",
1595        "RIMAGE_SIGNING_SCHEMA",  # Optional module
1596        "SECURITY_LOADPIN",  # Linux, in boards/xtensa/intel_adsp_cavs25/doc
1597        "SEL",
1598        "SHIFT",
1599        "SINGLE_APPLICATION_SLOT",  # Used in sysbuild for MCUboot configuration
1600        "SINGLE_APPLICATION_SLOT_RAM_LOAD",  # Used in sysbuild for MCUboot configuration
1601        "SOC_NORDIC_BSP_PATH_OVERRIDE",  # Used in modules/hal_nordic/nrfx/CMakeLists.txt
1602        "SOC_SDKNG_UNSUPPORTED",  # Used in modules/hal_nxp/mcux/CMakeLists.txt
1603        "SOC_SERIES_",  # Used as regex in scripts/utils/board_v1_to_v2.py
1604        "SOC_WATCH",  # Issue 13749
1605        "SOME_BOOL",
1606        "SOME_INT",
1607        "SOME_OTHER_BOOL",
1608        "SOME_STRING",
1609        "SRAM2",  # Referenced in a comment in samples/application_development
1610        "STACK_SIZE",  # Used as an example in the Kconfig docs
1611        "STD_CPP",  # Referenced in CMake comment
1612        "TEST1",
1613        # Defined in modules/hal_nxp/mcux/mcux-sdk-ng/basic.cmake.
1614        # It is used by MCUX SDK cmake functions to add content
1615        # based on current toolchain.
1616        "TOOLCHAIN",
1617        # The symbol is defined in the toolchain
1618        # Kconfig which is sourced based on Zephyr
1619        # toolchain variant and therefore not visible
1620        # to compliance.
1621        "TOOLCHAIN_ARCMWDT_SUPPORTS_THREAD_LOCAL_STORAGE",
1622        "TYPE_BOOLEAN",
1623        "USB_CONSOLE",
1624        "USE_STDC_",
1625        "WHATEVER",
1626        "ZEPHYR_TRY_MASS_ERASE",  # MCUBoot setting described in sysbuild documentation
1627        "ZTEST_FAIL_TEST_",  # regex in tests/ztest/fail/CMakeLists.txt
1628        "ZVFS_OPEN_ADD_SIZE_",  # Used as an option matching prefix
1629        # zephyr-keep-sorted-stop
1630    }
1631
1632
class KconfigBasicCheck(KconfigCheck):
    """
    Runs the reduced Kconfig test: only undefined references inside the
    Kconfig tree itself are reported, e.g. uses of Kconfig symbols that are
    never defined anywhere.
    """

    name = "KconfigBasic"

    def check_no_undef_outside_kconfig(self, kconf):
        """Intentionally disabled for the basic check; only in-tree references count."""
1645
1646
class KconfigBasicNoModulesCheck(KconfigBasicCheck):
    """
    Runs the basic Kconfig check with the module list emptied out. This
    catches symbols that the main repository uses but that only a module
    defines.
    """

    name = "KconfigBasicNoModules"
    path_hint = "<zephyr-base>"
    # Placeholder contents written to every generated module file.
    EMPTY_FILE_CONTENTS = "# Empty\n"

    def get_modules(self, module_dirs_file, modules_file, sysbuild_modules_file, settings_file):
        # Stub out each generated module file so Kconfig sees no modules at
        # all; settings_file is deliberately left untouched.
        for generated in (module_dirs_file, modules_file, sysbuild_modules_file):
            with open(generated, 'w') as fp:
                fp.write(self.EMPTY_FILE_CONTENTS)
1667
1668
class KconfigHWMv2Check(KconfigBasicCheck):
    """
    This runs the Kconfig test for the board and SoC v2 scheme.
    This check ensures that all symbols inside the v2 scheme are also defined
    within the same tree.
    This ensures the board and SoC trees are fully self-contained and reusable.
    """

    name = "KconfigHWMv2"

    # Use dedicated Kconfig board / soc v2 scheme file.
    # This file sources only the v2 scheme tree, not the full Zephyr Kconfig.
    FILENAME = os.path.join(os.path.dirname(__file__), "Kconfig.board.v2")
1682
1683
class SysbuildKconfigCheck(KconfigCheck):
    """
    Checks if we are introducing any new warnings/errors with sysbuild Kconfig,
    for example using undefined Kconfig variables.
    """

    name = "SysbuildKconfig"

    # Sysbuild uses its own Kconfig entry point and symbol prefix.
    FILENAME = "share/sysbuild/Kconfig"
    CONFIG_ = "SB_CONFIG_"

    # Allowlist of SB_CONFIG_-prefixed symbols that may be referenced without
    # a definition (e.g. documentation examples); replaces the larger
    # allowlist used for plain CONFIG_ symbols.
    UNDEF_KCONFIG_ALLOWLIST = {
        # zephyr-keep-sorted-start re(^\s+")
        "FOO",
        "MY_IMAGE",  # Used in sysbuild documentation as example
        "OTHER_APP_IMAGE_NAME",  # Used in sysbuild documentation as example
        "OTHER_APP_IMAGE_PATH",  # Used in sysbuild documentation as example
        "SECOND_SAMPLE",  # Used in sysbuild documentation
        # zephyr-keep-sorted-stop
    }
1705
1706
class SysbuildKconfigBasicCheck(SysbuildKconfigCheck, KconfigBasicCheck):
    """
    Checks if we are introducing any new warnings/errors with sysbuild Kconfig,
    for example using undefined Kconfig variables.
    This runs the basic Kconfig test, which is checking only for undefined
    references inside the sysbuild Kconfig tree.
    """

    # Combines the sysbuild entry point/prefix (SysbuildKconfigCheck) with the
    # reduced in-tree-only checking behavior (KconfigBasicCheck) via the MRO.
    name = "SysbuildKconfigBasic"
1716
1717
class SysbuildKconfigBasicNoModulesCheck(SysbuildKconfigCheck, KconfigBasicNoModulesCheck):
    """
    Checks if we are introducing any new warnings/errors with sysbuild Kconfig
    when no modules are available. Catches symbols used in the main repository
    but defined only in a module.
    """

    # Combines the sysbuild entry point/prefix (SysbuildKconfigCheck) with the
    # empty-module-list behavior (KconfigBasicNoModulesCheck) via the MRO.
    name = "SysbuildKconfigBasicNoModules"
    path_hint = "<zephyr-base>"
1727
1728
class Nits(ComplianceTest):
    """
    Checks various nits in added/modified files. Doesn't check stuff that's
    already covered by e.g. checkpatch.pl and pylint.
    """

    name = "Nits"
    doc = zephyr_doc_detail_builder("/contribute/guidelines.html#coding-style")

    def run(self):
        # File suffixes that get the generic whitespace nit checks.
        source_suffixes = (
            ".c",
            ".conf",
            ".cpp",
            ".dts",
            ".overlay",
            ".h",
            ".ld",
            ".py",
            ".rst",
            ".txt",
            ".yaml",
            ".yml",
        )

        # Apply the relevant subset of checks to every added/modified file.
        for fname in get_files(filter="d"):
            is_kconfig = "Kconfig" in fname

            if is_kconfig:
                self.check_kconfig_header(fname)
                self.check_redundant_zephyr_source(fname)

            if fname.startswith("dts/bindings/"):
                self.check_redundant_document_separator(fname)

            if (
                fname.endswith(source_suffixes)
                or is_kconfig
                or "defconfig" in fname
                or fname == "README"
            ):
                self.check_source_file(fname)

    def check_kconfig_header(self, fname):
        # Flags the spammy copy-pasted 'Kconfig - <description>' header style.
        contents = (GIT_TOP / fname).read_text(encoding="utf-8")

        # A redundant filename at the top usually means the whole header was
        # copy-pasted from another file.
        if re.match(r"\s*#\s*(K|k)config[\w.-]*\s*-", contents):
            self.failure(f"""
Please use this format for the header in '{fname}' (see
https://docs.zephyrproject.org/latest/build/kconfig/tips.html#header-comments-and-other-nits):

    # <Overview of symbols defined in the file, preferably in plain English>
    (Blank line)
    # Copyright (c) 2019 ...
    # SPDX-License-Identifier: <License>
    (Blank line)
    (Kconfig definitions)

Skip the "Kconfig - " part of the first line, since it's clear that the comment
is about Kconfig from context. The "# Kconfig - " is what triggers this
failure.
""")

    def check_redundant_zephyr_source(self, fname):
        # 'source "$(ZEPHYR_BASE)/Kconfig[.zephyr]"' can be simplified to
        # 'source "Kconfig[.zephyr]"', since $srctree is the Zephyr root.
        contents = (GIT_TOP / fname).read_text(encoding="utf-8")

        # Also match osource/rsource/orsource, for completeness
        match = re.search(
            r'^\s*(?:o|r|or)?source\s*"\$\(?ZEPHYR_BASE\)?/(Kconfig(?:\.zephyr)?)"',
            contents,
            re.MULTILINE,
        )

        if match:
            sourced = match.group(1)
            self.failure(
                f"""
Redundant 'source "$(ZEPHYR_BASE)/{sourced}" in '{fname}'. Just do 'source "{sourced}"'
instead. The $srctree environment variable already points to the Zephyr root,
and all 'source's are relative to it."""
            )

    def check_redundant_document_separator(self, fname):
        # Bindings are parsed one file at a time, so '...' document
        # separators are never needed.
        contents = (GIT_TOP / fname).read_text(encoding="utf-8")
        if re.search(r"^\.\.\.", contents, re.MULTILINE):
            self.failure(f"""\
Redundant '...' document separator in {fname}. Binding YAML files are never
concatenated together, so no document separators are needed.""")

    def check_source_file(self, fname):
        # Whitespace nits shared by all source-like files.
        contents = (GIT_TOP / fname).read_text(encoding="utf-8")

        if not contents.endswith("\n"):
            self.failure(f"Missing newline at end of '{fname}'. Check your text editor settings.")

        if contents.startswith("\n"):
            self.failure(f"Please remove blank lines at start of '{fname}'")

        if contents.endswith("\n\n"):
            self.failure(f"Please remove blank lines at end of '{fname}'")
1839
1840
class GitDiffCheck(ComplianceTest):
    """
    Checks for conflict markers or whitespace errors with git diff --check
    """

    name = "GitDiffCheck"
    doc = "Git conflict markers and whitespace errors are not allowed in added changes"

    def run(self):
        # `--check` is mutually exclusive with `--name-only` and `-s`, so
        # filter the unnecessary output down to offending lines with a regex.
        offender_re = re.compile(r"\S+\: .*\.")
        offending_lines = []

        for shaidx in get_shas(COMMIT_RANGE):
            # `git diff --check` sets the return code to the number of
            # offending lines, so a non-zero status is expected and ignored.
            diff = git(
                "diff",
                f"{shaidx}^!",
                "--check",
                "--",
                ":!*.diff",
                ":!*.patch",
                ignore_non_zero=True,
            )

            offending_lines.extend(f"{shaidx}: {line}" for line in offender_re.findall(diff))

        if offending_lines:
            self.failure("\n".join(offending_lines))
1867
1868
class LicenseAndCopyrightCheck(ComplianceTest):
    """
    Verify that every file touched by the patch set has correct SPDX headers and uses allowed
    license.
    """

    name = "LicenseAndCopyrightCheck"
    doc = "Check SPDX headers and copyright lines with the reuse Python API."

    def _report_violations(
        self,
        paths: Iterable[Path],
        title: str,
        severity: str,
        desc: str | None = None,
    ) -> None:
        """Emit one formatted failure per path, reported relative to the repo root."""
        for p in paths:
            rel_path = os.path.relpath(str(p), GIT_TOP)
            self.fmtd_failure(severity, title, rel_path, desc=desc or "", line=1)

    def run(self) -> None:
        changed_files = get_files(filter="d")
        if not changed_files:
            return

        # Only scan text files for now, in the future we may want to leverage REUSE standard's
        # ability to also associate license/copyright info with binary files.
        #
        # Build a new filtered list rather than calling remove() on the list
        # being iterated: removing during iteration silently skips the element
        # that follows each removed one, letting some binary files through.
        changed_files = [
            file
            for file in changed_files
            if magic.from_file(os.fspath(GIT_TOP / file), mime=True).startswith("text/")
        ]

        project = Project.from_directory(GIT_TOP)
        report = ProjectSubsetReport.generate(project, changed_files, multiprocessing=False)

        self._report_violations(
            report.files_without_licenses,
            "License missing",
            "warning",
            "File has no SPDX-License-Identifier header, consider adding one.",
        )

        self._report_violations(
            report.files_without_copyright,
            "Copyright missing",
            "warning",
            "File has no SPDX-FileCopyrightText header, consider adding one.",
        )

        # missing_licenses maps a license ID to the paths that reference it
        # but have no matching license file under /LICENSES.
        for lic_id, paths in getattr(report, "missing_licenses", {}).items():
            self._report_violations(
                paths,
                "License may not be allowed",
                "warning",
                (
                    f"License file for '{lic_id}' not found in /LICENSES. Please check "
                    "https://docs.zephyrproject.org/latest/contribute/guidelines.html#components-using-other-licenses."
                ),
            )
1929
1930
class GitLint(ComplianceTest):
    """
    Runs gitlint on the commits and reports any style/syntax issues it finds.
    """

    name = "Gitlint"
    doc = zephyr_doc_detail_builder("/contribute/guidelines.html#commit-guidelines")

    def run(self):
        # gitlint only looks for its .gitlint configuration in the current
        # directory, which is why cwd is pinned to the repository root.
        try:
            subprocess.run(
                f'gitlint --commits {COMMIT_RANGE}',
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                shell=True,
                cwd=GIT_TOP,
            )
        except subprocess.CalledProcessError as ex:
            # gitlint prints its findings on stdout/stderr; forward them.
            self.failure(ex.output.decode("utf-8"))
1955
1956
class PyLint(ComplianceTest):
    """
    Runs pylint on all .py files, with a limited set of checks enabled. The
    configuration is in the pylintrc file.
    """

    name = "Pylint"
    doc = "See https://www.pylint.org/ for more details"

    def run(self):
        # Path to pylint configuration file
        pylintrc = os.path.abspath(os.path.join(os.path.dirname(__file__), "pylintrc"))

        # Path to additional pylint check scripts
        check_script_dir = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "../pylint/checkers")
        )

        # List of files added/modified by the commit(s).
        files = get_files(filter="d")

        # Filter out everything but Python files. Keep filenames
        # relative (to GIT_TOP) to stay farther from any command line
        # limit.
        py_files = filter_py(GIT_TOP, files)
        if not py_files:
            return

        python_environment = os.environ.copy()
        if "PYTHONPATH" in python_environment:
            # Use os.pathsep (':' on POSIX, ';' on Windows) instead of a
            # hard-coded ':' so the child interpreter can split PYTHONPATH
            # correctly on every platform.
            python_environment["PYTHONPATH"] = (
                check_script_dir + os.pathsep + python_environment["PYTHONPATH"]
            )
        else:
            python_environment["PYTHONPATH"] = check_script_dir

        pylintcmd = [
            "pylint",
            "--output-format=json2",
            "--rcfile=" + pylintrc,
            "--load-plugins=argparse-checker",
        ] + py_files
        logger.info(cmd2str(pylintcmd))
        try:
            subprocess.run(
                pylintcmd,
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                cwd=GIT_TOP,
                env=python_environment,
            )
        except subprocess.CalledProcessError as ex:
            output = ex.output.decode("utf-8")
            messages = json.loads(output)['messages']
            for m in messages:
                # Map pylint categories to compliance severities:
                # F(atal)/E(rror) -> error, W/C/R/I -> warning.
                severity = 'unknown'
                if m['messageId'][0] in ('F', 'E'):
                    severity = 'error'
                elif m['messageId'][0] in ('W', 'C', 'R', 'I'):
                    severity = 'warning'
                self.fmtd_failure(
                    severity,
                    m['messageId'],
                    m['path'],
                    m['line'],
                    col=str(m['column']),
                    desc=m['message'] + f" ({m['symbol']})",
                )

            if len(messages) == 0:
                # If there are no specific messages add the whole output as a failure
                self.failure(output)
2030
2031
def filter_py(root, fnames):
    # PyLint check helper. Returns all Python script filenames among the
    # filenames in 'fnames', relative to directory 'root'.
    #
    # Uses the python-magic library, so that we can detect Python
    # files that don't end in .py as well. python-magic is a frontend
    # to libmagic, which is also used by 'file'.
    def _is_python(name):
        # Cheap suffix test first; only ask libmagic for non-.py names.
        if name.endswith(".py"):
            return True
        return magic.from_file(os.path.join(root, name), mime=True) == "text/x-python"

    return [name for name in fnames if _is_python(name)]
2047
2048
class CMakeStyle(ComplianceTest):
    """
    Checks cmake style added/modified files
    """

    name = "CMakeStyle"
    doc = zephyr_doc_detail_builder("/contribute/style/cmake.html")

    # Compiled once and shared by every checked file.
    _SPACE_BEFORE_PAREN = re.compile(r"^\s*if\s+\(")
    _TAB_INDENT = re.compile(r"^\t+")

    def run(self):
        # Only CMake sources among the added/modified files are checked.
        for fname in get_files(filter="d"):
            if fname.endswith((".cmake", "CMakeLists.txt")):
                self.check_style(fname)

    def check_style(self, fname):
        with open(fname, encoding="utf-8") as f:
            for line_num, line in enumerate(f, start=1):
                if self._TAB_INDENT.match(line):
                    self.fmtd_failure(
                        "error",
                        "CMakeStyle",
                        fname,
                        line_num,
                        "Use spaces instead of tabs for indentation",
                    )

                if self._SPACE_BEFORE_PAREN.match(line):
                    self.fmtd_failure(
                        "error",
                        "CMakeStyle",
                        fname,
                        line_num,
                        "Remove space before '(' in if() statements",
                    )
2086
2087
class Identity(ComplianceTest):
    """
    Checks if Emails of author and signed-off messages are consistent.
    """

    name = "Identity"
    doc = zephyr_doc_detail_builder("/contribute/guidelines.html#commit-guidelines")

    def run(self):
        for shaidx in get_shas(COMMIT_RANGE):
            # %an = author name, %ae = author email, %b = body
            parts = git('show', '-s', '--format=%an%n%ae%n%b', shaidx).split('\n', 2)

            failures = []

            if len(parts) == 2:
                # git printed only name and email: the body is empty.
                failures.append(f'{shaidx}: Empty commit message body')
                auth_name, auth_email = parts
                body = ''
            elif len(parts) == 3:
                auth_name, auth_email, body = parts
            else:
                self.failure(f'Unable to parse commit message for {shaidx}')
                continue

            if auth_email.endswith("@users.noreply.github.com"):
                failures.append(
                    f"{shaidx}: author email ({auth_email}) must "
                    "be a real email and cannot end in "
                    "@users.noreply.github.com"
                )

            # Everything to the right of ':' on each signoff line
            signoffs = re.findall(r"signed-off-by:\s(.*)", body, re.IGNORECASE)
            if not signoffs:
                failures.append(f'{shaidx}: Missing signed-off-by line')
            else:
                # Validate every signoff's syntax while also looking for one
                # that matches the commit author exactly.
                author_signed = False
                for signoff in signoffs:
                    match = re.search(r"(.+) <(.+)>", signoff)

                    if match is None:
                        failures.append(
                            f"{shaidx}: Signed-off-by line ({signoff}) "
                            "does not follow the syntax: First "
                            "Last <email>."
                        )
                    elif match.groups() == (auth_name, auth_email):
                        author_signed = True

                if not author_signed:
                    failures.append(
                        f"{shaidx}: author name ({auth_name}) and email ({auth_email}) "
                        "needs to match one of the signed-off-by entries."
                    )

            if failures:
                self.failure('\n'.join(failures))
2146
2147
class BinaryFiles(ComplianceTest):
    """
    Check that the diff contains no binary files.
    """

    name = "BinaryFiles"
    doc = "No binary files allowed."

    def run(self):
        allowed_paths = ("doc/", "boards/", "samples/")
        # svg files are always detected as binary, see .gitattributes
        allowed_exts = (".jpg", ".jpeg", ".png", ".svg", ".webp")

        # `git diff --numstat` reports "-" for both counts on binary files;
        # that is how they are detected here.
        for stat in git("diff", "--numstat", "--diff-filter=A", COMMIT_RANGE).splitlines():
            added, deleted, fname = stat.split("\t")
            if (added, deleted) != ("-", "-"):
                continue
            if fname.startswith(allowed_paths) and fname.endswith(allowed_exts):
                continue
            self.failure(f"Binary file not allowed: {fname}")
2167
2168
class ImageSize(ComplianceTest):
    """
    Check that any added image is limited in size.
    """

    name = "ImageSize"
    doc = "Check the size of image files."

    def run(self):
        default_limit = 250 << 10  # 250 KiB for images in general
        board_limit = 100 << 10  # 100 KiB for images under boards/

        for file in get_files(filter="d"):
            full_path = GIT_TOP / file

            # Only files libmagic identifies as images are size-checked.
            if not magic.from_file(os.fspath(full_path), mime=True).startswith("image/"):
                continue

            limit = board_limit if file.startswith("boards/") else default_limit

            if os.path.getsize(full_path) > limit:
                self.failure(
                    f"Image file too large: {file} reduce size to less than {limit >> 10}kB"
                )
2198
2199
class MaintainersFormat(ComplianceTest):
    """
    Check that MAINTAINERS file parses correctly.
    """

    name = "MaintainersFormat"
    doc = "Check that MAINTAINERS file parses correctly."

    def run(self):
        # Either filename variant may be present; parse whichever exists.
        for file in ("MAINTAINERS.yml", "MAINTAINERS.yaml"):
            if os.path.exists(file):
                try:
                    Maintainers(file)
                except MaintainersError as ex:
                    self.failure(f"Error parsing {file}: {ex}")
2219
2220
class ModulesMaintainers(ComplianceTest):
    """
    Check that all modules have a MAINTAINERS entry.
    """

    name = "ModulesMaintainers"
    doc = "Check that all modules have a MAINTAINERS entry."

    def run(self):
        manifest = Manifest.from_file()

        # The first existing maintainers file wins; without one there is
        # nothing to check against.
        maintainers_file = next(
            (f for f in ("MAINTAINERS.yml", "MAINTAINERS.yaml") if os.path.exists(f)),
            None,
        )
        if maintainers_file is None:
            return

        maintainers = Maintainers(maintainers_file)

        # Every active, non-manifest west project must have its own area.
        for project in manifest.get_projects([]):
            if not manifest.is_active(project) or isinstance(project, ManifestProject):
                continue

            area = f"West project: {project.name}"
            if area not in maintainers.areas:
                self.failure(f"Missing {maintainers_file} entry for: \"{area}\"")
2254
2255
class ZephyrModuleFile(ComplianceTest):
    """
    Check that no zephyr/module.yml file has been added to the Zephyr repository
    """

    name = "ZephyrModuleFile"
    doc = "Check that no zephyr/module.yml file has been added to the Zephyr repository."

    def run(self):
        candidates = (
            ZEPHYR_BASE / 'zephyr' / 'module.yml',
            ZEPHYR_BASE / 'zephyr' / 'module.yaml',
        )

        # any() stops at the first hit, matching the original single failure.
        if any(os.path.exists(path) for path in candidates):
            self.failure("A zephyr module file has been added to the Zephyr repository")
2274
2275
class YAMLLint(ComplianceTest):
    """
    YAMLLint
    """

    name = "YAMLLint"
    doc = "Check YAML files with YAMLLint."

    def run(self):
        config_file = ZEPHYR_BASE / ".yamllint"

        for file in get_files(filter="d"):
            if Path(file).suffix not in ('.yaml', '.yml'):
                continue

            # Parse a fresh config per file: the rules are tweaked below for
            # some files, so one shared config object would leak adjustments.
            yaml_config = config.YamlLintConfig(file=config_file)

            if file.startswith(".github/"):
                # Tweak few rules for workflow files.
                yaml_config.rules["line-length"] = False
                yaml_config.rules["truthy"]["allowed-values"].extend(['on', 'off'])
            elif file == ".codecov.yml":
                yaml_config.rules["truthy"]["allowed-values"].extend(['yes', 'no'])

            with open(file) as fp:
                for problem in linter.run(fp, yaml_config):
                    self.fmtd_failure(
                        'warning',
                        f'YAMLLint ({problem.rule})',
                        file,
                        problem.line,
                        col=problem.column,
                        desc=problem.desc,
                    )
2305
2306
class SphinxLint(ComplianceTest):
    """
    SphinxLint
    """

    name = "SphinxLint"
    doc = "Check Sphinx/reStructuredText files with sphinx-lint."

    # Checkers added/removed to sphinx-lint's default set
    DISABLE_CHECKERS = [
        "horizontal-tab",
        "missing-space-before-default-role",
        "trailing-whitespace",
    ]
    ENABLE_CHECKERS = ["default-role"]

    def run(self):
        # sphinx-lint does not expose a public API, so drive it via its CLI.
        disable = ','.join(self.DISABLE_CHECKERS)
        enable = ','.join(self.ENABLE_CHECKERS)
        # sphinx-lint reports findings as "<path>:<line>: <message>".
        issue_re = re.compile(r"^(.*):(\d+): (.*)$")

        for file in get_files():
            if not file.endswith(".rst"):
                continue

            try:
                subprocess.run(
                    f"sphinx-lint -d {disable} -e {enable} {file}",
                    check=True,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                    shell=True,
                    cwd=GIT_TOP,
                )
            except subprocess.CalledProcessError as ex:
                for line in ex.output.decode("utf-8").splitlines():
                    match = issue_re.match(line)
                    if match:
                        path, line_no, message = match.groups()
                        self.fmtd_failure(
                            "error",
                            "SphinxLint",
                            path,
                            int(line_no),
                            desc=message,
                        )
2353
2354
class KeepSorted(ComplianceTest):
    """
    Check for blocks of code or config that should be kept sorted.
    """

    name = "KeepSorted"
    doc = "Check for blocks of code or config that should be kept sorted."

    MARKER = "zephyr-keep-sorted"

    def block_check_sorted(self, block_data, *, regex, strip, fold, icase):
        """Return the index of the first out-of-order line in *block_data*,
        or -1 when the block is properly sorted.

        regex: only lines matching this pattern participate in the check.
        strip: characters to strip from both ends of each line first.
        fold:  treat indented lines as continuations of the previous entry.
        icase: compare case-insensitively (via casefold).
        """

        def _indented(text: str):
            return text.startswith((" ", "\t"))

        if regex is None:
            # Without a regex, non-indented lines define the entries, so
            # remove the common leading indentation first.
            block_data = textwrap.dedent(block_data)

        all_lines = block_data.splitlines()
        previous = ''

        for pos, entry in enumerate(all_lines):
            if not entry.strip():
                # Blank lines never take part in the ordering check
                continue

            if strip is not None:
                entry = entry.strip(strip)

            if regex:
                # Only compare lines matching the user-supplied pattern
                if not re.match(regex, entry):
                    continue
            else:
                # Indented lines are continuations, not entries of their own
                if _indented(entry):
                    continue

                if fold:
                    # Append the indented continuation lines to this entry
                    for cont in takewhile(_indented, all_lines[pos + 1 :]):
                        entry += cont.strip()

            if icase:
                entry = entry.casefold()

            if entry < previous:
                return pos

            previous = entry

        return -1

    def check_file(self, file, fp):
        """Scan *fp* for keep-sorted marker pairs and verify each block."""
        start_marker = f"{self.MARKER}-start"
        stop_marker = f"{self.MARKER}-stop"
        regex_marker = r"re\(([^)]+)\)"
        strip_marker = r"strip\(([^)]+)\)"
        nofold_marker = "nofold"
        ignorecase_marker = "ignorecase"

        block_data = ""
        in_block = False
        start_line = 0
        regex = None
        strip = None
        fold = True
        icase = False

        for line_num, line in enumerate(fp.readlines(), start=1):
            if start_marker in line:
                if in_block:
                    desc = f"nested {start_marker}"
                    self.fmtd_failure("error", "KeepSorted", file, line_num, desc=desc)
                in_block = True
                block_data = ""
                start_line = line_num + 1

                # Per-block options parsed from the start-marker line
                regex_match = re.search(regex_marker, line)
                regex = regex_match.group(1) if regex_match else None

                strip_match = re.search(strip_marker, line)
                strip = strip_match.group(1) if strip_match else None

                fold = nofold_marker not in line
                icase = ignorecase_marker in line
                continue

            if stop_marker in line:
                if not in_block:
                    desc = f"{stop_marker} without {start_marker}"
                    self.fmtd_failure("error", "KeepSorted", file, line_num, desc=desc)
                in_block = False

                bad = self.block_check_sorted(
                    block_data, regex=regex, strip=strip, fold=fold, icase=icase
                )
                if bad >= 0:
                    desc = f"sorted block has out-of-order line at {start_line + bad}"
                    self.fmtd_failure("error", "KeepSorted", file, line_num, desc=desc)
                continue

            if in_block:
                block_data += line

        if in_block:
            self.failure(f"unterminated {start_marker} in {file}")

    def run(self):
        for file in get_files(filter="d"):
            file_path = GIT_TOP / file

            # Only text files are checked; skip anything else
            mime_type = magic.from_file(os.fspath(file_path), mime=True)
            if not mime_type.startswith("text/"):
                continue

            # Text in the Zephyr tree is UTF-8. On Windows, the default text
            # encoding depends on the active code page (e.g. GBK), which can
            # break local runs with UnicodeDecodeError.
            with open(file_path, encoding="utf-8", errors="surrogateescape") as fp:
                self.check_file(file, fp)
2471
2472
class Ruff(ComplianceTest):
    """
    Ruff
    """

    name = "Ruff"
    doc = "Check python files with ruff."

    def run(self):
        """Run ruff's linter over the tree, then its formatter (diff mode)
        over each modified Python file, reporting findings as failures.
        """
        # Lint pass: one repo-wide invocation; findings come back as JSON on
        # stdout and a non-zero exit status (CalledProcessError).
        try:
            subprocess.run(
                "ruff check --output-format=json",
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.DEVNULL,
                shell=True,
                cwd=GIT_TOP,
            )
        except subprocess.CalledProcessError as ex:
            output = ex.output.decode("utf-8")
            messages = json.loads(output)
            for m in messages:
                self.fmtd_failure(
                    "error",
                    f'Python lint error ({m.get("code")}) see {m.get("url")}',
                    m.get("filename"),
                    line=m.get("location", {}).get("row"),
                    col=m.get("location", {}).get("column"),
                    end_line=m.get("end_location", {}).get("row"),
                    end_col=m.get("end_location", {}).get("column"),
                    desc=m.get("message"),
                )

        # Format pass: check each modified Python file; a non-zero exit
        # status means ruff would reformat the file.
        for file in get_files(filter="d"):
            if not file.endswith((".py", ".pyi")):
                continue

            try:
                # Quote the path: an unquoted file name containing spaces or
                # shell metacharacters would otherwise break (or be
                # interpreted by) the shell.
                subprocess.run(
                    f"ruff format --force-exclude --diff {shlex.quote(file)}",
                    check=True,
                    shell=True,
                    cwd=GIT_TOP,
                )
            except subprocess.CalledProcessError:
                desc = f"Run 'ruff format {file}'"
                self.fmtd_failure("error", "Python format error", file, desc=desc)
2520
2521
class PythonCompatCheck(ComplianceTest):
    """
    Python Compatibility Check
    """

    name = "PythonCompat"
    doc = "Check that Python files are compatible with Zephyr minimum supported Python version."

    MAX_VERSION = (3, 10)
    MAX_VERSION_STR = f"{MAX_VERSION[0]}.{MAX_VERSION[1]}"

    def run(self):
        """Run vermin over the modified Python files and flag any feature
        that needs a newer interpreter than MAX_VERSION (or is Python-2-only).
        """
        py_files = [f for f in get_files(filter="d") if f.endswith(".py")]
        if not py_files:
            return

        cmd = [
            "vermin",
            "-f",
            "parsable",
            "--violations",
            f"-t={self.MAX_VERSION_STR}",
            "--no-make-paths-absolute",
            *py_files,
        ]
        try:
            result = subprocess.run(cmd, check=False, capture_output=True, cwd=GIT_TOP)
        except Exception as ex:
            self.error(f"Failed to run vermin: {ex}")

        failed = False
        # Parsable output: <file>:<line>:<col>:<py2>:<py3>:<feature>
        for report in result.stdout.decode("utf-8").splitlines():
            fields = report.split(":")
            if len(fields) < 6:
                continue
            filename, line_number, column, _, py3ver, feature = fields[:6]
            if not line_number:
                # Ignore all file-level messages
                continue

            if py3ver.startswith('~'):
                # "no known reason it won't work", just skip
                continue

            if py3ver.startswith('!'):
                desc = f"{feature} is known to be incompatible with Python 3."
            else:
                major, minor = map(int, py3ver.split(".")[:2])
                if (major, minor) <= self.MAX_VERSION:
                    # Within the supported range; nothing to report
                    continue
                desc = (
                    f"{feature} requires Python {major}.{minor}, which is higher than "
                    f"Zephyr's minimum supported Python version ({self.MAX_VERSION_STR})."
                )

            self.fmtd_failure(
                "error",
                "PythonCompat",
                filename,
                line=int(line_number),
                col=int(column) if column else None,
                desc=desc,
            )
            failed = True

        if failed:
            self.failure(
                "Some Python files use features that are not compatible with Python "
                f"{self.MAX_VERSION_STR}."
            )
2589
2590
class TextEncoding(ComplianceTest):
    """
    Check that any text file is encoded in ascii or utf-8.
    """

    name = "TextEncoding"
    doc = "Check the encoding of text files."

    ALLOWED_CHARSETS = ["us-ascii", "utf-8"]

    def run(self):
        detector = magic.Magic(mime=True, mime_encoding=True)

        for file in get_files(filter="d"):
            mime_type = detector.from_file(os.fspath(GIT_TOP / file))

            # Only text files carry a charset worth checking
            if not mime_type.startswith("text/"):
                continue

            # format is "text/<type>; charset=<charset>"
            charset = mime_type.rsplit('=')[-1]
            if charset not in self.ALLOWED_CHARSETS:
                desc = f"Text file with unsupported encoding: {file} has mime type {mime_type}"
                self.fmtd_failure("error", "TextEncoding", file, desc=desc)
2615
2616
def init_logs(cli_arg):
    """Initialize the module-wide root logger.

    The level comes from *cli_arg* when given, otherwise from the LOG_LEVEL
    environment variable, defaulting to WARN.
    """
    global logger

    fallback_level = os.environ.get('LOG_LEVEL', "WARN")

    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(levelname)-8s: %(message)s'))

    logger = logging.getLogger('')
    logger.addHandler(handler)
    logger.setLevel(cli_arg or fallback_level)

    logger.info("Log init completed, level=%s", logging.getLevelName(logger.getEffectiveLevel()))
2632
2633
def inheritors(klass):
    """Return the set of all (direct and indirect) subclasses of *klass*."""
    found = set()

    def _walk(cls):
        # Depth-first over __subclasses__, skipping anything already seen
        for sub in cls.__subclasses__():
            if sub not in found:
                found.add(sub)
                _walk(sub)

    _walk(klass)
    return found
2644
2645
def annotate(res):
    """
    https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#about-workflow-commands
    """
    # Escape per the workflow-command spec; '%' must be escaped first so the
    # later replacements don't get double-escaped.
    escaped = res.message.replace('%', '%25').replace('\n', '%0A').replace('\r', '%0D')

    fields = [f'file={res.file}']
    if res.line:
        fields.append(f'line={res.line}')
    if res.col:
        fields.append(f'col={res.col}')
    if res.end_line:
        fields.append(f'endLine={res.end_line}')
    if res.end_col:
        fields.append(f'endColumn={res.end_col}')
    fields.append(f'title={res.title}')

    print(f'::{res.severity} ' + ','.join(fields) + f'::{escaped}')
2660
2661
def resolve_path_hint(hint):
    """Expand the special path-hint placeholders; pass anything else through."""
    if hint == "<zephyr-base>":
        return ZEPHYR_BASE
    if hint == "<git-top>":
        return GIT_TOP
    return hint
2669
2670
def parse_args(argv):
    """Build the CLI parser and parse *argv* (None means sys.argv[1:])."""
    default_range = 'HEAD~1..HEAD'

    p = argparse.ArgumentParser(
        description="Check for coding style and documentation warnings.", allow_abbrev=False
    )
    p.add_argument('-c', '--commits', default=default_range,
                   help=f'''Commit range in the form: a..[b], default is
                        {default_range}''')
    p.add_argument('-o', '--output', default="compliance.xml",
                   help='''Name of outfile in JUnit format,
                        default is ./compliance.xml''')
    p.add_argument('-n', '--no-case-output', action="store_true",
                   help="Do not store the individual test case output.")
    p.add_argument('-l', '--list', action="store_true", help="List all checks and exit")
    p.add_argument("-v", "--loglevel",
                   choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                   help="python logging level")
    p.add_argument('-m', '--module', action="append", default=[],
                   help="Checks to run. All checks by default. (case insensitive)")
    p.add_argument('-e', '--exclude-module', action="append", default=[],
                   help="Do not run the specified checks (case insensitive)")
    p.add_argument('-j', '--previous-run', default=None,
                   help='''Pre-load JUnit results in XML format
                        from a previous run and combine with new results.''')
    p.add_argument('--annotate', action="store_true",
                   help="Print GitHub Actions-compatible annotations.")

    return p.parse_args(argv)
2729
2730
def _main(args):
    # The "real" main(), which is wrapped to catch exceptions and report them
    # to GitHub. Returns the number of test failures.

    # The absolute path of the top-level git directory. Initialize it here so
    # that issues running Git can be reported to GitHub.
    global GIT_TOP
    GIT_TOP = Path(git("rev-parse", "--show-toplevel"))

    # The commit range passed in --commit, e.g. "HEAD~3"
    global COMMIT_RANGE
    COMMIT_RANGE = args.commits

    init_logs(args.loglevel)

    logger.info(f'Running tests on commit range {COMMIT_RANGE}')

    # --list: just print the available check names and stop.
    if args.list:
        for testcase in sorted(inheritors(ComplianceTest), key=lambda x: x.name):
            print(testcase.name)
        return 0

    # Load saved test results from an earlier run, if requested
    if args.previous_run:
        if not os.path.exists(args.previous_run):
            # This probably means that an earlier pass had an internal error
            # (the script is currently run multiple times by the ci-pipelines
            # repo). Since that earlier pass might've posted an error to
            # GitHub, avoid generating a GitHub comment here, by avoiding
            # sys.exit() (which gets caught in main()).
            print(f"error: '{args.previous_run}' not found", file=sys.stderr)
            return 1

        logging.info(f"Loading previous results from {args.previous_run}")
        # NOTE: only the first test suite in the previous XML is reused; the
        # loop breaks after the first iteration.
        for loaded_suite in JUnitXml.fromfile(args.previous_run):
            suite = loaded_suite
            break
    else:
        suite = TestSuite("Compliance")

    # Both selection flags are matched case-insensitively against check names.
    included = list(map(lambda x: x.lower(), args.module))
    excluded = list(map(lambda x: x.lower(), args.exclude_module))

    for testcase in inheritors(ComplianceTest):
        # "Modules" and "testcases" are the same thing. Better flags would have
        # been --tests and --exclude-tests or the like, but it's awkward to
        # change now.

        if included and testcase.name.lower() not in included:
            continue

        if testcase.name.lower() in excluded:
            print("Skipping " + testcase.name)
            continue

        test = testcase()
        try:
            print(f"Running {test.name:30} tests in {resolve_path_hint(test.path_hint)} ...")
            test.run()
        except EndTest:
            # A check raised EndTest to stop itself early; its results are
            # already recorded on the test object.
            pass
        except BaseException:
            # Catch everything (BaseException, not Exception) so a single
            # misbehaving check cannot abort the whole compliance run; the
            # traceback is recorded as that check's failure instead.
            test.failure(f"An exception occurred in {test.name}:\n{traceback.format_exc()}")

        # Annotate if required
        if args.annotate:
            for res in test.fmtd_failures:
                annotate(res)

        suite.add_testcase(test.case)

    # Write the combined JUnit XML report.
    if args.output:
        xml = JUnitXml()
        xml.add_testsuite(suite)
        xml.update_statistics()
        xml.write(args.output, pretty=True)

    # Partition cases: any error/failure result makes a case a hard failure;
    # cases with only other result types count as warnings.
    failed_cases = []
    warning_cases = []
    name2doc = {testcase.name: testcase.doc for testcase in inheritors(ComplianceTest)}

    for case in suite:
        if case.result:
            if case.is_skipped:
                logging.warning(f"Skipped {case.name}")
            else:
                if any(res.type in ('error', 'failure') for res in case.result):
                    failed_cases.append(case)
                else:
                    warning_cases.append(case)
        else:
            # Some checks can produce no .result
            logging.info(f"No JUnit result for {case.name}")

    n_fails = len(failed_cases)
    n_warnings = len(warning_cases)

    if n_fails or n_warnings:
        if n_fails:
            print(f"{n_fails} check(s) failed")
        if n_warnings:
            print(f"{n_warnings} check(s) with warnings only")

        for case in failed_cases + warning_cases:
            for res in case.result:
                errmsg = res.text.strip()
                if res.type in ('error', 'failure'):
                    logging.error(f"Test {case.name} failed: \n{errmsg}")
                else:
                    logging.warning(f"Test {case.name} warning: \n{errmsg}")

            if args.no_case_output:
                continue
            # One <case-name>.txt per case: the check's doc string followed by
            # each recorded message.
            with open(f"{case.name}.txt", "w") as f:
                docs = name2doc.get(case.name)
                f.write(f"{docs}\n")
                for res in case.result:
                    errmsg = res.text.strip()
                    f.write(f'\n {errmsg}')

    if args.output:
        print(f"\nComplete results in {args.output}")
    return n_fails
2854
2855
def main(argv=None):
    """Entry point: parse arguments, run all checks, exit with the number of
    failed checks as the status code."""
    options = parse_args(argv)

    try:
        failures = _main(options)
    except BaseException:
        # Catch BaseException instead of Exception to include stuff like
        # SystemExit (raised by sys.exit()); print before re-raising so the
        # traceback reaches the console/GitHub either way.
        print(f"Python exception in `{__file__}`:\n\n```\n{traceback.format_exc()}\n```")
        raise

    sys.exit(failures)
2869
2870
def cmd2str(cmd):
    """Format the command-line arguments in the iterable *cmd* into a single
    shell-quoted string, for error messages and the like."""
    quoted = [shlex.quote(token) for token in cmd]
    return " ".join(quoted)
2876
2877
def err(msg):
    """Terminate via sys.exit() with *msg*, prefixed by the program name."""
    prog = sys.argv[0]  # Empty if missing
    prefix = f"{prog}: " if prog else ""
    sys.exit(f"{prefix} error: {msg}")
2883
2884
if __name__ == "__main__":
    # Run directly: pass along the CLI arguments (minus the program name).
    main(sys.argv[1:])
2887