1# Copyright (c) 2019 Nordic Semiconductor ASA
2# Copyright (c) 2019 Linaro Limited
3# Copyright 2025 NXP
4# SPDX-License-Identifier: BSD-3-Clause
5
6# Tip: You can view just the documentation with 'pydoc3 devicetree.edtlib'
7
8"""
9Library for working with devicetrees at a higher level compared to dtlib. Like
10dtlib, this library presents a tree of devicetree nodes, but the nodes are
11augmented with information from bindings and include some interpretation of
12properties. Some of this interpretation is based on conventions established
13by the Linux kernel, so the Documentation/devicetree/bindings in the Linux
14source code is sometimes good reference material.
15
16Bindings are YAML files that describe devicetree nodes. Devicetree
17nodes are usually mapped to bindings via their 'compatible = "..."' property,
18but a binding can also come from a 'child-binding:' key in the binding for the
19parent devicetree node.
20
21Each devicetree node (dtlib.Node) gets a corresponding edtlib.Node instance,
22which has all the information related to the node.
23
24The top-level entry points for the library are the EDT and Binding classes.
25See their constructor docstrings for details. There is also a
26bindings_from_paths() helper function.
27"""
28
29# NOTE: tests/test_edtlib.py is the test suite for this library.
30
31# Implementation notes
32# --------------------
33#
34# A '_' prefix on an identifier in Python is a convention for marking it private.
35# Please do not access private things. Instead, think of what API you need, and
36# add it.
37#
38# This module is not meant to have any global state. It should be possible to
39# create several EDT objects with independent binding paths and flags. If you
40# need to add a configuration parameter or the like, store it in the EDT
41# instance, and initialize it e.g. with a constructor argument.
42#
43# This library is layered on top of dtlib, and is not meant to expose it to
44# clients. This keeps the header generation script simple.
45#
46# General biased advice:
47#
48# - Consider using @property for APIs that don't need parameters. It makes
49#   functions look like attributes, which is less awkward in clients, and makes
50#   it easy to switch back and forth between variables and functions.
51#
52# - Think about the data type of the thing you're exposing. Exposing something
53#   as e.g. a list or a dictionary is often nicer and more flexible than adding
54#   a function.
55#
56# - Avoid get_*() prefixes on functions. Name them after the thing they return
57#   instead. This often makes the code read more naturally in callers.
58#
59#   Also, consider using @property instead of get_*().
60#
61# - Don't expose dtlib stuff directly.
62#
63# - Add documentation for any new APIs you add.
64#
65#   The convention here is that docstrings (quoted strings) are used for public
66#   APIs, and "doc comments" for internal functions.
67#
68#   @properties are documented in the class docstring, as if they were
69#   variables. See the existing @properties for a template.
70
71import base64
72import hashlib
73import logging
74import os
75import re
76from collections import defaultdict
77from collections.abc import Callable, Iterable
78from copy import deepcopy
79from dataclasses import dataclass
80from typing import TYPE_CHECKING, Any, NoReturn, Optional, Union
81
82import yaml
83
84try:
85    # Use the C LibYAML parser if available, rather than the Python parser.
86    # This makes e.g. gen_defines.py more than twice as fast.
87    from yaml import CLoader as Loader
88except ImportError:
89    from yaml import Loader  # type: ignore
90
91from devicetree._private import _slice_helper
92from devicetree.dtlib import DT, DTError, Type, to_num, to_nums
93from devicetree.dtlib import Node as dtlib_Node
94from devicetree.dtlib import Property as dtlib_Property
95from devicetree.grutils import Graph
96
97
def _compute_hash(path: str) -> str:
    # Returns a short identifier-safe hash derived from the node's full
    # devicetree path. The SHA-256 digest is base64-encoded with both
    # special characters of the standard alphabet ('+' and '/') replaced
    # by '_', and the trailing '=' padding stripped.
    digest = hashlib.sha256(path.encode()).digest()
    return base64.b64encode(digest, altchars=b'__').decode().rstrip('=')
103
104#
105# Public classes
106#
107
108
class Binding:
    """
    Represents a parsed binding.

    These attributes are available on Binding objects:

    path:
      The absolute path to the file defining the binding.

    title:
      The free-form title of the binding (optional).

      When the content in the 'description:' is too long, the 'title:' can
      be used as a heading for the extended description. Typically, it serves
      as a description of the hardware model. For example:

      title: Nordic GPIO

      description: |
        Descriptions and example nodes related to GPIO.
        ...

    description:
      The free-form description of the binding.

    compatible:
      The compatible string the binding matches.

      This may be None. For example, it's None when the Binding is inferred
      from node properties. It can also be None for Binding objects created
      using 'child-binding:' with no compatible.

    examples:
      Provides a minimal example node illustrating the binding (optional).
      Like this:

      examples:
        - |
          / {
              model = "This is a sample node";
              ...
          };

    prop2specs:
      A dict mapping property names to PropertySpec objects
      describing those properties' values.

    specifier2cells:
      A dict that maps specifier space names (like "gpio",
      "clock", "pwm", etc.) to lists of cell names.

      For example, if the binding YAML contains 'pin' and 'flags' cell names
      for the 'gpio' specifier space, like this:

          gpio-cells:
          - pin
          - flags

      Then the Binding object will have a 'specifier2cells' attribute mapping
      "gpio" to ["pin", "flags"]. A missing key should be interpreted as zero
      cells.

    raw:
      The binding as an object parsed from YAML.

    bus:
      If nodes with this binding's 'compatible' describe a bus, a string
      describing the bus type (like "i2c") or a list describing supported
      protocols (like ["i3c", "i2c"]). None otherwise.

      Note that this is the raw value from the binding where it can be
      a string or a list. Use "buses" instead unless you need the raw
      value, where "buses" is always a list.

    buses:
      Derived property from 'bus' where 'buses' is a list of bus(es),
      for example, ["i2c"] or ["i3c", "i2c"]. Or an empty list if there is
      no 'bus:' in this binding.

    on_bus:
      If nodes with this binding's 'compatible' appear on a bus, a string
      describing the bus type (like "i2c"). None otherwise.

    child_binding:
      If this binding describes the properties of child nodes, then
      this is a Binding object for those children; it is None otherwise.
      A Binding object's 'child_binding.child_binding' is not None if there
      are multiple levels of 'child-binding' descriptions in the binding.
    """

    def __init__(self, path: Optional[str], fname2path: dict[str, str],
                 raw: Any = None, require_compatible: bool = True,
                 require_description: bool = True, require_title: bool = False):
        """
        Binding constructor.

        path:
          Path to binding YAML file. May be None.

        fname2path:
          Map from include files to their absolute paths. Must
          not be None, but may be empty.

        raw:
          Optional raw content in the binding.
          This does not have to have any "include:" lines resolved.
          May be left out, in which case 'path' is opened and read.
          This can be used to resolve child bindings, for example.

        require_compatible:
          If True, it is an error if the binding does not contain a
          "compatible:" line. If False, a missing "compatible:" is
          not an error. Either way, "compatible:" must be a string
          if it is present in the binding.

        require_description:
          If True, it is an error if the binding does not contain a
          "description:" line. If False, a missing "description:" is
          not an error. Either way, "description:" must be a string
          if it is present in the binding.

        require_title:
          If True, it is an error if the binding does not contain a
          "title:" line. If False, a missing "title:" is not an error.
          Either way, "title:" must be a string if it is present in
          the binding.
        """
        self.path: Optional[str] = path
        self._fname2path: dict[str, str] = fname2path

        if raw is None:
            if path is None:
                _err("you must provide either a 'path' or a 'raw' argument")
            with open(path, encoding="utf-8") as f:
                raw = yaml.load(f, Loader=_BindingLoader)

        # Merge any included files into self.raw. This also pulls in
        # inherited child binding definitions, so it has to be done
        # before initializing those.
        self.raw: dict = self._merge_includes(raw, self.path)

        # Recursively initialize any child bindings. These don't
        # require a 'compatible', 'description' or 'title' to be well
        # defined, but they must be dicts.
        if "child-binding" in raw:
            if not isinstance(raw["child-binding"], dict):
                _err(f"malformed 'child-binding:' in {self.path}, "
                     "expected a binding (dictionary with keys/values)")
            self.child_binding: Optional[Binding] = Binding(
                path, fname2path,
                raw=raw["child-binding"],
                require_compatible=False,
                require_description=False)
        else:
            self.child_binding = None

        # Make sure this is a well defined object.
        self._check(require_compatible, require_description, require_title)

        # Initialize look up tables.
        self.prop2specs: dict[str, PropertySpec] = {}
        for prop_name in self.raw.get("properties", {}):
            self.prop2specs[prop_name] = PropertySpec(prop_name, self)
        self.specifier2cells: dict[str, list[str]] = {}
        for key, val in self.raw.items():
            if key.endswith("-cells"):
                self.specifier2cells[key[:-len("-cells")]] = val

    def __repr__(self) -> str:
        if self.compatible:
            compat = f" for compatible '{self.compatible}'"
        else:
            compat = ""
        basename = os.path.basename(self.path or "")
        return f"<Binding {basename}" + compat + ">"

    @property
    def title(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('title')

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('description')

    @property
    def compatible(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('compatible')

    @property
    def bus(self) -> Union[None, str, list[str]]:
        "See the class docstring"
        return self.raw.get('bus')

    @property
    def examples(self) -> Optional[list[str]]:
        "See the class docstring"
        return self.raw.get('examples')

    @property
    def buses(self) -> list[str]:
        "See the class docstring"
        # self._buses is only assigned by _check() when 'bus:' is present.
        if self.raw.get('bus') is not None:
            return self._buses
        else:
            return []

    @property
    def on_bus(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('on-bus')

    def _merge_includes(self, raw: dict, binding_path: Optional[str]) -> dict:
        # Constructor helper. Merges included files in
        # 'raw["include"]' into 'raw' using 'self._include_paths' as a
        # source of include files, removing the "include" key while
        # doing so.
        #
        # This treats 'binding_path' as the binding file being built up
        # and uses it for error messages.

        if "include" not in raw:
            return raw

        include = raw.pop("include")

        # First, merge the included files together. If more than one included
        # file has a 'required:' for a particular property, OR the values
        # together, so that 'required: true' wins.

        merged: dict[str, Any] = {}

        if isinstance(include, str):
            # Simple scalar string case
            _merge_props(merged, self._load_raw(include), None, binding_path,
                         False)
        elif isinstance(include, list):
            # List of strings and maps. These types may be intermixed.
            for elem in include:
                if isinstance(elem, str):
                    _merge_props(merged, self._load_raw(elem), None,
                                 binding_path, False)
                elif isinstance(elem, dict):
                    name = elem.pop('name', None)
                    allowlist = elem.pop('property-allowlist', None)
                    blocklist = elem.pop('property-blocklist', None)
                    child_filter = elem.pop('child-binding', None)

                    if elem:
                        # We've popped out all the valid keys.
                        _err(f"'include:' in {binding_path} should not have "
                             f"these unexpected contents: {elem}")

                    _check_include_dict(name, allowlist, blocklist,
                                        child_filter, binding_path)

                    contents = self._load_raw(name)

                    _filter_properties(contents, allowlist, blocklist,
                                       child_filter, binding_path)
                    _merge_props(merged, contents, None, binding_path, False)
                else:
                    _err(f"all elements in 'include:' in {binding_path} "
                         "should be either strings or maps with a 'name' key "
                         "and optional 'property-allowlist' or "
                         f"'property-blocklist' keys, but got: {elem}")
        else:
            # Invalid item.
            _err(f"'include:' in {binding_path} "
                 f"should be a string or list, but has type {type(include)}")

        # Next, merge the merged included files into 'raw'. Error out if
        # 'raw' has 'required: false' while the merged included files have
        # 'required: true'.

        _merge_props(raw, merged, None, binding_path, check_required=True)

        return raw

    def _load_raw(self, fname: str) -> dict:
        # Returns the contents of the binding given by 'fname' after merging
        # any bindings it lists in 'include:' into it. 'fname' is just the
        # basename of the file, so we check that there aren't multiple
        # candidates.

        path = self._fname2path.get(fname)

        if not path:
            _err(f"'{fname}' not found")

        with open(path, encoding="utf-8") as f:
            contents = yaml.load(f, Loader=_BindingLoader)
            if not isinstance(contents, dict):
                _err(f'{path}: invalid contents, expected a mapping')

        return self._merge_includes(contents, path)

    def _check(self, require_compatible: bool, require_description: bool,
               require_title: bool):
        # Does sanity checking on the binding.

        raw = self.raw

        if "compatible" in raw:
            compatible = raw["compatible"]
            if not isinstance(compatible, str):
                _err(f"malformed 'compatible: {compatible}' "
                     f"field in {self.path} - "
                     f"should be a string, not {type(compatible).__name__}")
        elif require_compatible:
            _err(f"missing 'compatible' in {self.path}")

        if "title" in raw:
            title = raw["title"]
            if not isinstance(title, str) or not title:
                _err(f"malformed or empty 'title' in {self.path}")
        elif require_title:
            _err(f"missing 'title' in {self.path}")

        if "description" in raw:
            description = raw["description"]
            if not isinstance(description, str) or not description:
                _err(f"malformed or empty 'description' in {self.path}")
        elif require_description:
            _err(f"missing 'description' in {self.path}")

        # Allowed top-level keys. The 'include' key should have been
        # removed by _load_raw() already.
        ok_top = {"title", "description", "compatible", "bus",
                  "on-bus", "properties", "child-binding", "examples"}

        # Descriptive errors for legacy bindings.
        legacy_errors = {
            "#cells": "expected *-cells syntax",
            "child": "use 'bus: <bus>' instead",
            "child-bus": "use 'bus: <bus>' instead",
            "parent": "use 'on-bus: <bus>' instead",
            "parent-bus": "use 'on-bus: <bus>' instead",
            "sub-node": "use 'child-binding' instead",
        }

        for key in raw:
            if key in legacy_errors:
                _err(f"legacy '{key}:' in {self.path}, {legacy_errors[key]}")

            if key not in ok_top and not key.endswith("-cells"):
                _err(f"unknown key '{key}' in {self.path}, "
                     f"expected one of {', '.join(ok_top)}, or *-cells")

        if "bus" in raw:
            bus = raw["bus"]
            # 'bus:' must be either a string or a list of strings. Note the
            # 'or' between the list check and the element check: a list
            # containing non-string elements is malformed too, and for a
            # non-list value the element check must not run at all.
            if (not isinstance(bus, str) and
               (not isinstance(bus, list) or
                not all(isinstance(elem, str) for elem in bus))):
                _err(f"malformed 'bus:' value in {self.path}, "
                     "expected string or list of strings")

            if isinstance(bus, list):
                self._buses = bus
            else:
                # Convert bus into a list
                self._buses = [bus]

        if ("on-bus" in raw
            and not isinstance(raw["on-bus"], str)):
            _err(f"malformed 'on-bus:' value in {self.path}, "
                 "expected string")

        self._check_properties()

        for key, val in raw.items():
            # The 'list of strings' check applies only to '*-cells' keys;
            # the grouping parentheses matter, since without them the
            # element check would also run for unrelated top-level keys.
            if (key.endswith("-cells")
                and (not isinstance(val, list)
                     or not all(isinstance(elem, str) for elem in val))):
                _err(f"malformed '{key}:' in {self.path}, "
                     "expected a list of strings")

    def _check_properties(self) -> None:
        # _check() helper for checking the contents of 'properties:'.

        raw = self.raw

        if "properties" not in raw:
            return

        ok_prop_keys = {"description", "type", "required",
                        "enum", "const", "default", "deprecated",
                        "specifier-space"}

        for prop_name, options in raw["properties"].items():
            for key in options:
                if key not in ok_prop_keys:
                    _err(f"unknown setting '{key}' in "
                         f"'properties: {prop_name}: ...' in {self.path}, "
                         f"expected one of {', '.join(ok_prop_keys)}")

            _check_prop_by_type(prop_name, options, self.path)

            for true_false_opt in ["required", "deprecated"]:
                if true_false_opt in options:
                    option = options[true_false_opt]
                    if not isinstance(option, bool):
                        _err(f"malformed '{true_false_opt}:' setting '{option}' "
                             f"for '{prop_name}' in 'properties' in {self.path}, "
                             "expected true/false")

            if options.get("deprecated") and options.get("required"):
                _err(f"'{prop_name}' in 'properties' in {self.path} should not "
                      "have both 'deprecated' and 'required' set")

            if ("description" in options
                and not isinstance(options["description"], str)):
                _err("missing, malformed, or empty 'description' for "
                     f"'{prop_name}' in 'properties' in {self.path}")

            if "enum" in options and not isinstance(options["enum"], list):
                _err(f"enum in {self.path} for property '{prop_name}' "
                     "is not a list")
529
530
class PropertySpec:
    """
    Represents a "property specification", i.e. the description of a
    property provided by a binding file, like its type and description.

    These attributes are available on PropertySpec objects:

    binding:
      The Binding object which defined this property.

    name:
      The property's name.

    path:
      The file where this property was defined. In case a binding includes
      other bindings, this is the including binding file.
      Generally this means that this will be the binding file specifying
      the devicetree node of which this is a property.

    type:
      The type of the property as a string, as given in the binding.

    description:
      The free-form description of the property as a string, or None.

    enum:
      A list of values the property may take as given in the binding, or None.

    enum_tokenizable:
      True if enum is not None and all the values in it are tokenizable;
      False otherwise.

      A property must have string or string-array type and an "enum:" in its
      binding to be tokenizable. Additionally, the "enum:" values must be
      unique after converting all non-alphanumeric characters to underscores
      (so "foo bar" and "foo_bar" in the same "enum:" would not be
      tokenizable).

    enum_upper_tokenizable:
      Like 'enum_tokenizable', with the additional restriction that the
      "enum:" values must be unique after uppercasing and converting
      non-alphanumeric characters to underscores.

    const:
      The property's constant value as given in the binding, or None.

    default:
      The property's default value as given in the binding, or None.

    deprecated:
      True if the property is deprecated; False otherwise.

    required:
      True if the property is marked required; False otherwise.

    specifier_space:
      The specifier space for the property as given in the binding, or None.
    """

    def __init__(self, name: str, binding: 'Binding'):
        self.name: str = name
        self.binding: 'Binding' = binding
        # The raw 'properties: <name>: ...' mapping from the binding data.
        self._raw: dict[str, Any] = binding.raw["properties"][name]

    def __repr__(self) -> str:
        return f"<PropertySpec {self.name} type '{self.type}'>"

    @property
    def path(self) -> Optional[str]:
        "See the class docstring"
        return self.binding.path

    @property
    def type(self) -> str:
        "See the class docstring"
        return self._raw["type"]

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("description")

    @property
    def enum(self) -> Optional[list]:
        "See the class docstring"
        return self._raw.get("enum")

    @property
    def enum_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily and cached on first access.
        try:
            return self._enum_tokenizable
        except AttributeError:
            pass

        if self.type not in ('string', 'string-array') or self.enum is None:
            self._enum_tokenizable = False
        else:
            # Cache _as_tokens too, so enum_upper_tokenizable can reuse it.
            tokens = [re.sub(_NOT_ALPHANUM_OR_UNDERSCORE, '_', value)
                      for value in self.enum]
            self._as_tokens = tokens
            self._enum_tokenizable = len(tokens) == len(set(tokens))

        return self._enum_tokenizable

    @property
    def enum_upper_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily and cached on first access.
        try:
            return self._enum_upper_tokenizable
        except AttributeError:
            pass

        if not self.enum_tokenizable:
            self._enum_upper_tokenizable = False
        else:
            uppercased = {token.upper() for token in self._as_tokens}
            self._enum_upper_tokenizable = (
                len(self._as_tokens) == len(uppercased))

        return self._enum_upper_tokenizable

    @property
    def const(self) -> Union[None, int, list[int], str, list[str]]:
        "See the class docstring"
        return self._raw.get("const")

    @property
    def default(self) -> Union[None, int, list[int], str, list[str]]:
        "See the class docstring"
        return self._raw.get("default")

    @property
    def required(self) -> bool:
        "See the class docstring"
        return self._raw.get("required", False)

    @property
    def deprecated(self) -> bool:
        "See the class docstring"
        return self._raw.get("deprecated", False)

    @property
    def specifier_space(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("specifier-space")
672
# Union of all Python types that Property.val may take, depending on the
# 'type:' key in the property's binding. 'Node' and 'ControllerAndData'
# are forward references to classes defined later in this module.
PropertyValType = Union[int, str,
                        list[int], list[str],
                        'Node', list['Node'],
                        list[Optional['ControllerAndData']],
                        bytes, None]
678
679
@dataclass
class Property:
    """
    Represents a property on a Node, as set in its DT node and with
    additional info from the 'properties:' section of the binding.

    Only properties mentioned in 'properties:' get created. Properties of type
    'compound' currently do not get Property instances, as it's not clear
    what to generate for them.

    These attributes are available on Property objects. Several are
    just convenience accessors for attributes on the PropertySpec object
    accessible via the 'spec' attribute.

    These attributes are available on Property objects:

    spec:
      The PropertySpec object which specifies this property.

    val:
      The value of the property, with the format determined by spec.type,
      which comes from the 'type:' string in the binding.

        - For 'type: int/array/string/string-array', 'val' is what you'd expect
          (a Python integer or string, or a list of them)

        - For 'type: uint8-array', 'val' is a bytes object

        - For 'type: phandle' and 'type: path', 'val' is the pointed-to Node
          instance

        - For 'type: phandles', 'val' is a list of the pointed-to Node
          instances

        - For 'type: phandle-array', 'val' is a list of ControllerAndData
          instances. See the documentation for that class.

    node:
      The Node instance the property is on

    name:
      Convenience for spec.name.

    description:
      Convenience for spec.description with leading and trailing whitespace
      (including newlines) removed. May be None.

    type:
      Convenience for spec.type.

    val_as_tokens:
      The value of the property as a list of tokens, i.e. with non-alphanumeric
      characters replaced with underscores. This is only safe to access
      if 'spec.enum_tokenizable' returns True.

    enum_indices:
      A list of indices of 'val' in 'spec.enum' (which comes from the 'enum:'
      list in the binding), or None if spec.enum is None.
    """

    spec: PropertySpec
    val: PropertyValType
    node: 'Node'

    @property
    def name(self) -> str:
        "See the class docstring"
        return self.spec.name

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        # An empty or missing description maps to None.
        desc = self.spec.description
        return desc.strip() if desc else None

    @property
    def type(self) -> str:
        "See the class docstring"
        return self.spec.type

    @property
    def val_as_tokens(self) -> list[str]:
        "See the class docstring"
        # Scalar values are treated as one-element lists.
        vals = self.val if isinstance(self.val, list) else [self.val]
        tokens = []
        for subval in vals:
            assert isinstance(subval, str)
            tokens.append(str_as_token(subval))
        return tokens

    @property
    def enum_indices(self) -> Optional[list[int]]:
        "See the class docstring"
        enum = self.spec.enum
        if not enum:
            return None
        # Scalar values are treated as one-element lists.
        vals = self.val if isinstance(self.val, list) else [self.val]
        return [enum.index(subval) for subval in vals]
774
775
@dataclass
class Register:
    """
    Represents a register on a node.

    These attributes are available on Register objects:

    node:
      The Node instance this register is from

    name:
      The name of the register as given in the 'reg-names' property, or None if
      there is no 'reg-names' property

    addr:
      The starting address of the register, in the parent address space, or None
      if #address-cells is zero. Any 'ranges' properties are taken into account.

    size:
      The length of the register in bytes. The field is Optional, so
      presumably None when #size-cells is zero -- confirm against the code
      that constructs Register instances.
    """

    node: 'Node'
    name: Optional[str]
    addr: Optional[int]
    size: Optional[int]
802
803
@dataclass
class Range:
    """
    Represents a translation range on a node as described by the 'ranges' property.

    NOTE(review): each Range object presumably corresponds to a single entry
    of the 'ranges' property -- confirm against the code that constructs
    Range instances.

    These attributes are available on Range objects:

    node:
      The Node instance this range is from

    child_bus_cells:
      The number of cells used to describe a child bus address.

    child_bus_addr:
      A physical address within the child bus address space, or None if the
      child's #address-cells equals 0.

    parent_bus_cells:
      The number of cells used to describe a parent bus address.

    parent_bus_addr:
      A physical address within the parent bus address space, or None if the
      parent's #address-cells equals 0.

    length_cells:
      The number of cells used to describe the size of range in
      the child's address space.

    length:
      The size of the range in the child address space, or None if the
      child's #size-cells equals 0.
    """
    node: 'Node'
    child_bus_cells: int
    child_bus_addr: Optional[int]
    parent_bus_cells: int
    parent_bus_addr: Optional[int]
    length_cells: int
    length: Optional[int]
843
844
@dataclass
class ControllerAndData:
    """
    One entry in an 'interrupts' or 'type: phandle-array' property value,
    e.g. <&ctrl-1 4 0> in

        cs-gpios = <&ctrl-1 4 0 &ctrl-2 3 4>;

    These attributes are available on ControllerAndData objects:

    node:
      The Node instance the property appears on

    controller:
      The Node instance for the controller (e.g. the controller the interrupt
      gets sent to for interrupts)

    data:
      Dict mapping the names from the controller binding's *-cells key to the
      data values, e.g. {"pin": 4, "flags": 0} for the example above.

      'interrupts = <1 2>' might give {"irq": 1, "level": 2}.

    name:
      Entry name from 'interrupt-names'/'gpio-names'/'pwm-names'/etc., or
      None when there is no *-names property

    basename:
      Basename for the controller when supporting named cells. AKA, the
      specifier space.
    """
    node: 'Node'
    controller: 'Node'
    data: dict
    name: Optional[str]
    basename: Optional[str]
882
883
@dataclass
class PinCtrl:
    """
    One pin control configuration for a set of pins on a device, e.g.
    pinctrl-0 or pinctrl-1.

    These attributes are available on PinCtrl objects:

    node:
      The Node instance the pinctrl-* property is on

    name:
      Configuration name from pinctrl-names, or None when there is no
      pinctrl-names property

    name_as_token:
      Like 'name', but with non-alphanumeric characters converted to
      underscores.

    conf_nodes:
      List of Node instances for the pin configuration nodes, e.g. the nodes
      pointed at by &state_1 and &state_2 in

          pinctrl-0 = <&state_1 &state_2>;
    """

    node: 'Node'
    name: Optional[str]
    conf_nodes: list['Node']

    @property
    def name_as_token(self):
        "See the class docstring"
        if self.name is None:
            return None
        return str_as_token(self.name)
917
918
919class Node:
920    """
921    Represents a devicetree node, augmented with information from bindings, and
922    with some interpretation of devicetree properties. There's a one-to-one
923    correspondence between devicetree nodes and Nodes.
924
925    These attributes are available on Node objects:
926
927    edt:
928      The EDT instance this node is from
929
930    name:
931      The name of the node
932
933    unit_addr:
934      An integer with the ...@<unit-address> portion of the node name,
935      translated through any 'ranges' properties on parent nodes, or None if
936      the node name has no unit-address portion. PCI devices use a different
937      node name format ...@<dev>,<func> or ...@<dev> (e.g. "pcie@1,0"), in
938      this case None is returned.
939
940    title:
941      The title string from the binding for the node, or None if the node
942      has no binding.
943
944    description:
945      The description string from the binding for the node, or None if the node
946      has no binding. Leading and trailing whitespace (including newlines) is
947      removed.
948
949    path:
950      The devicetree path of the node
951
952    label:
953      The text from the 'label' property on the node, or None if the node has
954      no 'label'
955
956    labels:
957      A list of all of the devicetree labels for the node, in the same order
958      as the labels appear, but with duplicates removed.
959
960      This corresponds to the actual devicetree source labels, unlike the
961      "label" attribute, which is the value of a devicetree property named
962      "label".
963
964    parent:
965      The Node instance for the devicetree parent of the Node, or None if the
966      node is the root node
967
968    children:
969      A dictionary with the Node instances for the devicetree children of the
970      node, indexed by name
971
972    dep_ordinal:
973      A non-negative integer value such that the value for a Node is
974      less than the value for all Nodes that depend on it.
975
976      The ordinal is defined for all Nodes, and is unique among nodes in its
977      EDT 'nodes' list.
978
979    hash:
980      A hashed value of the devicetree path of the node. This is defined for
981      all Nodes, and is checked for uniqueness among nodes in its EDT 'nodes'
982      list.
983
984    required_by:
985      A list with the nodes that directly depend on the node
986
987    depends_on:
988      A list with the nodes that the node directly depends on
989
990    status:
991      The node's status property value, as a string, or "okay" if the node
992      has no status property set.
993
994    read_only:
995      True if the node has a 'read-only' property, and False otherwise
996
997    matching_compat:
998      The 'compatible' string for the binding that matched the node, or None if
999      the node has no binding
1000
1001    binding_path:
1002      The path to the binding file for the node, or None if the node has no
1003      binding
1004
1005    compats:
1006      A list of 'compatible' strings for the node, in the same order that
1007      they're listed in the .dts file
1008
1009    ranges:
1010      A list of Range objects extracted from the node's ranges property.
1011      The list is empty if the node does not have a range property.
1012
1013    regs:
1014      A list of Register objects for the node's registers
1015
1016    props:
1017      A dict that maps property names to Property objects.
1018      Property objects are created for the devicetree properties
1019      defined by the node's binding and that have a default value
1020      or for which a value is set in the DTS.
1021
1022    aliases:
1023      A list of aliases for the node. This is fetched from the /aliases node.
1024
1025    interrupts:
1026      A list of ControllerAndData objects for the interrupts generated by the
1027      node. The list is empty if the node does not generate interrupts.
1028
1029    pinctrls:
1030      A list of PinCtrl objects for the pinctrl-<index> properties on the
1031      node, sorted by index. The list is empty if the node does not have any
1032      pinctrl-<index> properties.
1033
1034    buses:
1035      If the node is a bus node (has a 'bus:' key in its binding), then this
1036      attribute holds the list of supported bus types, e.g. ["i2c"], ["spi"]
1037      or ["i3c", "i2c"] if multiple protocols are supported via the same bus.
1038      If the node is not a bus node, then this attribute is an empty list.
1039
1040    on_buses:
1041      The bus the node appears on, e.g. ["i2c"], ["spi"] or ["i3c", "i2c"] if
1042      multiple protocols are supported via the same bus. The bus is determined
1043      by searching upwards for a parent node whose binding has a 'bus:' key,
1044      returning the value of the first 'bus:' key found. If none of the node's
1045      parents has a 'bus:' key, this attribute is an empty list.
1046
    bus_node:
      Like on_buses, but contains the Node for the bus controller, or None if
      the node is not on a bus.
1050
1051    flash_controller:
1052      The flash controller for the node. Only meaningful for nodes representing
1053      flash partitions.
1054
1055    spi_cs_gpio:
1056      The device's SPI GPIO chip select as a ControllerAndData instance, if it
1057      exists, and None otherwise. See
1058      Documentation/devicetree/bindings/spi/spi-controller.yaml in the Linux kernel.
1059
1060    gpio_hogs:
1061      A list of ControllerAndData objects for the GPIOs hogged by the node. The
1062      list is empty if the node does not hog any GPIOs. Only relevant for GPIO hog
1063      nodes.
1064
1065    is_pci_device:
1066      True if the node is a PCI device.
1067    """
1068
    def __init__(
        self,
        dt_node: dtlib_Node,
        edt: "EDT",
        support_fixed_partitions_on_any_bus: bool = True,
    ):
        '''
        For internal use only; not meant to be used outside edtlib itself.
        '''

        # 'compatible' is optional; fall back to an empty list so the rest
        # of the initialization can treat both cases uniformly.
        compats = (
            dt_node.props["compatible"].to_strings()
            if "compatible" in dt_node.props
            else []
        )

        # Private, don't touch outside the class:
        self._node: dtlib_Node = dt_node
        self._binding: Optional[Binding] = None

        # Public, some of which are initialized properly later:
        self.edt: EDT = edt
        # -1 is a placeholder; the real ordinal is assigned later, once the
        # dependency graph is available.
        self.dep_ordinal: int = -1
        self.compats: list[str] = compats
        self.ranges: list[Range] = []
        self.regs: list[Register] = []
        self.props: dict[str, Property] = {}
        self.interrupts: list[ControllerAndData] = []
        self.pinctrls: list[PinCtrl] = []
        # Relies on parent nodes being initialized before their children.
        self.bus_node = self._bus_node(support_fixed_partitions_on_any_bus)
        self.hash: str = _compute_hash(dt_node.path)

        # Binding matching (via on_buses) consults self.bus_node, so this
        # must come after the bus node has been resolved above.
        self._init_binding()
        self._init_regs()
        self._init_ranges()
1104
    @property
    def name(self) -> str:
        "See the class docstring"
        # Plain delegation to the underlying dtlib node.
        return self._node.name
1109
    @property
    def filename(self) -> str:
        "Source file the node was declared in (delegates to dtlib)."
        return self._node.filename
1114
    @property
    def lineno(self) -> int:
        "Source line the node was declared on (delegates to dtlib)."
        return self._node.lineno
1119
1120    @property
1121    def unit_addr(self) -> Optional[int]:
1122        "See the class docstring"
1123
1124        # TODO: Return a plain string here later, like dtlib.Node.unit_addr?
1125
1126        # PCI devices use a different node name format (e.g. "pcie@1,0")
1127        if "@" not in self.name or self.is_pci_device:
1128            return None
1129
1130        try:
1131            addr = int(self.name.split("@", 1)[1], 16)
1132        except ValueError:
1133            _err(f"{self!r} has non-hex unit address")
1134
1135        return _translate(addr, self._node)
1136
1137    @property
1138    def title(self) -> Optional[str]:
1139        "See the class docstring."
1140        if self._binding:
1141            return self._binding.title
1142        return None
1143
1144    @property
1145    def description(self) -> Optional[str]:
1146        "See the class docstring."
1147        if self._binding:
1148            return self._binding.description
1149        return None
1150
1151    @property
1152    def path(self) ->  str:
1153        "See the class docstring"
1154        return self._node.path
1155
1156    @property
1157    def label(self) -> Optional[str]:
1158        "See the class docstring"
1159        if "label" in self._node.props:
1160            return self._node.props["label"].to_string()
1161        return None
1162
    @property
    def labels(self) -> list[str]:
        "See the class docstring"
        # Devicetree source labels ('mylabel: node@0 { ... }'), not the
        # 'label' property; delegated to dtlib.
        return self._node.labels
1167
    @property
    def parent(self) -> Optional['Node']:
        "See the class docstring"
        # The root node's dtlib parent is None, which .get() maps to None.
        return self.edt._node2enode.get(self._node.parent) # type: ignore
1172
1173    @property
1174    def children(self) -> dict[str, 'Node']:
1175        "See the class docstring"
1176        # Could be initialized statically too to preserve identity, but not
1177        # sure if needed. Parent nodes being initialized before their children
1178        # would need to be kept in mind.
1179        return {name: self.edt._node2enode[node]
1180                for name, node in self._node.nodes.items()}
1181
1182    def child_index(self, node) -> int:
1183        """Get the index of *node* in self.children.
1184        Raises KeyError if the argument is not a child of this node.
1185        """
1186        if not hasattr(self, '_child2index'):
1187            # Defer initialization of this lookup table until this
1188            # method is callable to handle parents needing to be
1189            # initialized before their chidlren. By the time we
1190            # return from __init__, 'self.children' is callable.
1191            self._child2index: dict[str, int] = {}
1192            for index, child_path in enumerate(child.path for child in
1193                                               self.children.values()):
1194                self._child2index[child_path] = index
1195
1196        return self._child2index[node.path]
1197
    @property
    def required_by(self) -> list['Node']:
        "See the class docstring"
        # Direct reverse dependencies, from the EDT-wide dependency graph.
        return self.edt._graph.required_by(self)
1202
    @property
    def depends_on(self) -> list['Node']:
        "See the class docstring"
        # Direct dependencies, from the EDT-wide dependency graph.
        return self.edt._graph.depends_on(self)
1207
1208    @property
1209    def status(self) -> str:
1210        "See the class docstring"
1211        status = self._node.props.get("status")
1212
1213        if status is None:
1214            as_string = "okay"
1215        else:
1216            as_string = status.to_string()
1217
1218        return as_string
1219
    @property
    def read_only(self) -> bool:
        "See the class docstring"
        # 'read-only' is a boolean property: its mere presence means True.
        return "read-only" in self._node.props
1224
1225    @property
1226    def matching_compat(self) -> Optional[str]:
1227        "See the class docstring"
1228        if self._binding:
1229            return self._binding.compatible
1230        return None
1231
1232    @property
1233    def binding_path(self) -> Optional[str]:
1234        "See the class docstring"
1235        if self._binding:
1236            return self._binding.path
1237        return None
1238
1239    @property
1240    def aliases(self) -> list[str]:
1241        "See the class docstring"
1242        return [alias for alias, node in self._node.dt.alias2node.items()
1243                if node is self._node]
1244
1245    @property
1246    def buses(self) -> list[str]:
1247        "See the class docstring"
1248        if self._binding:
1249            return self._binding.buses
1250        return []
1251
1252    @property
1253    def on_buses(self) -> list[str]:
1254        "See the class docstring"
1255        bus_node = self.bus_node
1256        return bus_node.buses if bus_node else []
1257
1258    @property
1259    def flash_controller(self) -> 'Node':
1260        "See the class docstring"
1261
1262        # The node path might be something like
1263        # /flash-controller@4001E000/flash@0/partitions/partition@fc000. We go
1264        # up two levels to get the flash and check its compat. The flash
1265        # controller might be the flash itself (for cases like NOR flashes).
1266        # For the case of 'soc-nv-flash', we assume the controller is the
1267        # parent of the flash node.
1268
1269        if not self.parent or not self.parent.parent:
1270            _err(f"flash partition {self!r} lacks parent or grandparent node")
1271
1272        controller = self.parent.parent
1273        if controller.matching_compat == "soc-nv-flash":
1274            if controller.parent is None:
1275                _err(f"flash controller '{controller.path}' cannot be the root node")
1276            return controller.parent
1277        return controller
1278
1279    @property
1280    def spi_cs_gpio(self) -> Optional[ControllerAndData]:
1281        "See the class docstring"
1282
1283        if not ("spi" in self.on_buses
1284                and self.bus_node
1285                and "cs-gpios" in self.bus_node.props):
1286            return None
1287
1288        if not self.regs:
1289            _err(f"{self!r} needs a 'reg' property, to look up the "
1290                 "chip select index for SPI")
1291
1292        parent_cs_lst = self.bus_node.props["cs-gpios"].val
1293        if TYPE_CHECKING:
1294            assert isinstance(parent_cs_lst, list)
1295
1296        # cs-gpios is indexed by the unit address
1297        cs_index = self.regs[0].addr
1298        if TYPE_CHECKING:
1299            assert isinstance(cs_index, int)
1300
1301        if cs_index >= len(parent_cs_lst):
1302            _err(f"index from 'regs' in {self!r} ({cs_index}) "
1303                 "is >= number of cs-gpios in "
1304                 f"{self.bus_node!r} ({len(parent_cs_lst)})")
1305
1306        ret = parent_cs_lst[cs_index]
1307        if TYPE_CHECKING:
1308            assert isinstance(ret, ControllerAndData)
1309        return ret
1310
1311    @property
1312    def gpio_hogs(self) -> list[ControllerAndData]:
1313        "See the class docstring"
1314
1315        if "gpio-hog" not in self.props:
1316            return []
1317
1318        if not self.parent or "gpio-controller" not in self.parent.props:
1319            _err(f"GPIO hog {self!r} lacks parent GPIO controller node")
1320
1321        if "#gpio-cells" not in self.parent._node.props:
1322            _err(f"GPIO hog {self!r} parent node lacks #gpio-cells")
1323
1324        n_cells = self.parent._node.props["#gpio-cells"].to_num()
1325        res = []
1326
1327        for item in _slice(self._node, "gpios", 4*n_cells,
1328                           f"4*(<#gpio-cells> (= {n_cells})"):
1329            controller = self.parent
1330            res.append(ControllerAndData(
1331                node=self, controller=controller,
1332                data=self._named_cells(controller, item, "gpio"),
1333                name=None, basename="gpio"))
1334
1335        return res
1336
1337    @property
1338    def has_child_binding(self) -> bool:
1339        """
1340        True if the node's binding contains a child-binding definition, False
1341        otherwise
1342        """
1343        return bool(self._binding and self._binding.child_binding)
1344
    @property
    def is_pci_device(self) -> bool:
        "See the class docstring"
        # A node is considered a PCI device iff it sits on a 'pcie' bus.
        return 'pcie' in self.on_buses
1349
1350    def __repr__(self) -> str:
1351        if self.binding_path:
1352            binding = "binding " + self.binding_path
1353        else:
1354            binding = "no binding"
1355        return f"<Node {self.path} in '{self.edt.dts_path}', {binding}>"
1356
    def _init_binding(self) -> None:
        # Initializes Node._binding. It holds data from the node's binding file,
        # in the format returned by PyYAML (plain Python lists, dicts, etc.), or
        # None if the node has no binding.

        # This relies on the parent of the node having already been
        # initialized, which is guaranteed by going through the nodes in
        # node_iter() order.

        # Nodes whose paths were registered for binding inference get a
        # binding synthesized from their properties instead of a YAML one.
        if self.path in self.edt._infer_binding_for_paths:
            self._binding_from_properties()
            return

        if self.compats:
            on_buses = self.on_buses

            for compat in self.compats:
                # When matching, respect the order of the 'compatible' entries,
                # and for each one first try to match against an explicitly
                # specified bus (if any) and then against any bus. This is so
                # that matching against bindings which do not specify a bus
                # works the same way in Zephyr as it does elsewhere.
                binding = None

                # Collect all available bindings for this compatible for warning purposes
                available_bindings = [
                    (binding_bus, candidate_binding.path)
                    for (binding_compat, binding_bus), candidate_binding
                    in self.edt._compat2binding.items()
                    if binding_compat == compat
                ]

                # First preference: a binding registered for one of the buses
                # this node sits on.
                for bus in on_buses:
                    if (compat, bus) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, bus]
                        break

                if not binding:
                    # Second preference: a bus-agnostic binding.
                    if (compat, None) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, None]
                    else:
                        # No matching binding found - warn if bindings exist for other buses
                        if (available_bindings and
                            self.edt._warn_bus_mismatch):
                            current_bus = on_buses[0] if on_buses else "none"

                            # Format available bus information for the warning
                            available_bus_info = []
                            for bus, binding_path in available_bindings:  # type: ignore
                                bus_name = bus if bus is not None else "any"
                                # Get relative path for cleaner output
                                rel_path = (os.path.relpath(binding_path)
                                            if binding_path is not None else "unknown")
                                bus_info = f"'{bus_name}' (from {rel_path})"
                                available_bus_info.append(bus_info)

                            _LOG.warning(
                                f"Node '{self.path}' with compatible '{compat}' "
                                f"is on bus '{current_bus}', but available bindings "
                                f"expect: {', '.join(available_bus_info)}. "
                                f"No binding will be applied to this node."
                            )
                        # Try the next 'compatible' entry.
                        continue

                self._binding = binding
                return
        else:
            # No 'compatible' property. See if the parent binding has
            # a compatible. This can come from one or more levels of
            # nesting with 'child-binding:'.

            binding_from_parent = self._binding_from_parent()
            if binding_from_parent:
                self._binding = binding_from_parent
                return

        # No binding found
        self._binding = None
1435
1436    def _binding_from_properties(self) -> None:
1437        # Sets up a Binding object synthesized from the properties in the node.
1438
1439        if self.compats:
1440            _err(f"compatible in node with inferred binding: {self.path}")
1441
1442        # Synthesize a 'raw' binding as if it had been parsed from YAML.
1443        raw: dict[str, Any] = {
1444            'description': 'Inferred binding from properties, via edtlib.',
1445            'properties': {},
1446        }
1447        for name, prop in self._node.props.items():
1448            pp: dict[str, str] = {}
1449            if prop.type == Type.EMPTY:
1450                pp["type"] = "boolean"
1451            elif prop.type == Type.BYTES:
1452                pp["type"] = "uint8-array"
1453            elif prop.type == Type.NUM:
1454                pp["type"] = "int"
1455            elif prop.type == Type.NUMS:
1456                pp["type"] = "array"
1457            elif prop.type == Type.STRING:
1458                pp["type"] = "string"
1459            elif prop.type == Type.STRINGS:
1460                pp["type"] = "string-array"
1461            elif prop.type == Type.PHANDLE:
1462                pp["type"] = "phandle"
1463            elif prop.type == Type.PHANDLES:
1464                pp["type"] = "phandles"
1465            elif prop.type == Type.PHANDLES_AND_NUMS:
1466                pp["type"] = "phandle-array"
1467            elif prop.type == Type.PATH:
1468                pp["type"] = "path"
1469            else:
1470                _err(f"cannot infer binding from property: {prop} "
1471                     f"with type {prop.type!r}")
1472            raw['properties'][name] = pp
1473
1474        # Set up Node state.
1475        self.compats = []
1476        self._binding = Binding(None, {}, raw=raw, require_compatible=False)
1477
1478    def _binding_from_parent(self) -> Optional[Binding]:
1479        # Returns the binding from 'child-binding:' in the parent node's
1480        # binding.
1481
1482        if not self.parent:
1483            return None
1484
1485        pbinding = self.parent._binding
1486        if not pbinding:
1487            return None
1488
1489        if pbinding.child_binding:
1490            return pbinding.child_binding
1491
1492        return None
1493
1494    def _bus_node(self, support_fixed_partitions_on_any_bus: bool = True
1495                  ) -> Optional['Node']:
1496        # Returns the value for self.bus_node. Relies on parent nodes being
1497        # initialized before their children.
1498
1499        if not self.parent:
1500            # This is the root node
1501            return None
1502
1503        # Treat 'fixed-partitions' as if they are not on any bus.  The reason is
1504        # that flash nodes might be on a SPI or controller or SoC bus.  Having
1505        # bus be None means we'll always match the binding for fixed-partitions
1506        # also this means want processing the fixed-partitions node we wouldn't
1507        # try to do anything bus specific with it.
1508        if support_fixed_partitions_on_any_bus and "fixed-partitions" in self.compats:
1509            return None
1510
1511        if self.parent.buses:
1512            # The parent node is a bus node
1513            return self.parent
1514
1515        # Same bus node as parent (possibly None)
1516        return self.parent.bus_node
1517
    def _init_crossrefs(
        self, default_prop_types: bool = False, err_on_deprecated: bool = False
    ) -> None:
        # Initializes all properties that require cross-references to other
        # nodes, like 'phandle' and 'phandles'. This is done after all nodes
        # have been initialized.
        #
        # default_prop_types:
        #   Passed to _init_props(); enables the fallback property specs when
        #   the node has no binding.
        #
        # err_on_deprecated:
        #   If True, a deprecated property is an error instead of a warning.
        self._init_props(
            default_prop_types=default_prop_types, err_on_deprecated=err_on_deprecated
        )
        self._init_interrupts()
        self._init_pinctrls()
1529
1530    def _init_props(self, default_prop_types: bool = False,
1531                    err_on_deprecated: bool = False) -> None:
1532        # Creates self.props. See the class docstring. Also checks that all
1533        # properties on the node are declared in its binding.
1534
1535        self.props = {}
1536
1537        if self._binding:
1538            prop2specs = self._binding.prop2specs
1539        else:
1540            prop2specs = None
1541
1542        # Initialize self.props
1543        if prop2specs:
1544            for prop_spec in prop2specs.values():
1545                self._init_prop(prop_spec, err_on_deprecated)
1546            self._check_undeclared_props()
1547        elif default_prop_types:
1548            for name in self._node.props:
1549                if name not in _DEFAULT_PROP_SPECS:
1550                    continue
1551                prop_spec = _DEFAULT_PROP_SPECS[name]
1552                val = self._prop_val(name, prop_spec, err_on_deprecated)
1553                self.props[name] = Property(prop_spec, val, self)
1554
1555    def _init_prop(self, prop_spec: PropertySpec,
1556                   err_on_deprecated: bool) -> None:
1557        # _init_props() helper for initializing a single property.
1558        # 'prop_spec' is a PropertySpec object from the node's binding.
1559
1560        name = prop_spec.name
1561        prop_type = prop_spec.type
1562        if not prop_type:
1563            _err(f"'{name}' in {self.binding_path} lacks 'type'")
1564
1565        val = self._prop_val(name, prop_spec, err_on_deprecated)
1566
1567        if val is None:
1568            # 'required: false' property that wasn't there, or a property type
1569            # for which we store no data.
1570            return
1571
1572        enum = prop_spec.enum
1573        for subval in val if isinstance(val, list) else [val]:
1574            if enum and subval not in enum:
1575                _err(f"value of property '{name}' on {self.path} in "
1576                    f"{self.edt.dts_path} ({subval!r}) is not in 'enum' list in "
1577                    f"{self.binding_path} ({enum!r})")
1578
1579        const = prop_spec.const
1580        if const is not None and val != const:
1581            _err(f"value of property '{name}' on {self.path} in "
1582                 f"{self.edt.dts_path} ({val!r}) "
1583                 "is different from the 'const' value specified in "
1584                 f"{self.binding_path} ({const!r})")
1585
1586        # Skip properties that start with '#', like '#size-cells', and mapping
1587        # properties like 'gpio-map'/'interrupt-map'
1588        if name[0] == "#" or name.endswith("-map"):
1589            return
1590
1591        self.props[name] = Property(prop_spec, val, self)
1592
    def _prop_val(
        self,
        name: str,
        prop_spec: PropertySpec,
        err_on_deprecated: bool,
    ) -> PropertyValType:
        # _init_prop() helper for getting the property's value
        #
        # name:
        #   Property name from binding
        #
        # prop_spec:
        #   PropertySpec from binding
        #
        # err_on_deprecated:
        #   If True, a deprecated property is an error instead of warning.

        node = self._node
        prop = node.props.get(name)
        binding_path = prop_spec.binding.path
        prop_type = prop_spec.type
        deprecated = prop_spec.deprecated
        required = prop_spec.required
        default = prop_spec.default
        specifier_space = prop_spec.specifier_space

        # A deprecated property actually set in the DTS is either a hard
        # error or a warning, depending on the caller's preference.
        if prop and deprecated:
            msg = (
                f"'{name}' is marked as deprecated in 'properties:' "
                f"in {binding_path} for node {node.path}."
            )
            if err_on_deprecated:
                _err(msg)
            else:
                _LOG.warning(msg)

        if not prop:
            # Missing required properties are only an error on enabled
            # ("okay") nodes.
            if required and self.status == "okay":
                _err(
                    f"'{name}' is marked as required in 'properties:' in "
                    f"{binding_path}, but does not appear in {node!r}"
                )

            if default is not None:
                # YAML doesn't have a native format for byte arrays. We need to
                # convert those from an array like [0x12, 0x34, ...]. The
                # format has already been checked in
                # _check_prop_by_type().
                if prop_type == "uint8-array":
                    return bytes(default) # type: ignore
                return default

            # Absent booleans read as False rather than None, so they always
            # have a usable value.
            return False if prop_type == "boolean" else None

        # Property exists: convert it according to the binding's 'type:'.
        if prop_type == "boolean":
            if prop.type != Type.EMPTY:
                _err(f"'{name}' in {node!r} is defined with 'type: boolean' "
                     f"in {binding_path}, but is assigned a value ('{prop}') "
                     f"instead of being empty ('{name};')")
            return True

        if prop_type == "int":
            return prop.to_num()

        if prop_type == "array":
            return prop.to_nums()

        if prop_type == "uint8-array":
            return prop.to_bytes()

        if prop_type == "string":
            return prop.to_string()

        if prop_type == "string-array":
            return prop.to_strings()

        if prop_type == "phandle":
            return self.edt._node2enode[prop.to_node()]

        if prop_type == "phandles":
            return [self.edt._node2enode[node] for node in prop.to_nodes()]

        if prop_type == "phandle-array":
            # This type is a bit high-level for dtlib as it involves
            # information from bindings and *-names properties, so there's no
            # to_phandle_array() in dtlib. Do the type check ourselves.
            if prop.type not in (Type.PHANDLE, Type.PHANDLES, Type.PHANDLES_AND_NUMS):
                _err(f"expected property '{name}' in {node.path} in "
                     f"{node.dt.filename} to be assigned "
                     f"with '{name} = < &foo ... &bar 1 ... &baz 2 3 >' "
                     f"(a mix of phandles and numbers), not '{prop}'")

            return self._standard_phandle_val_list(prop, specifier_space)

        if prop_type == "path":
            return self.edt._node2enode[prop.to_path()]

        # prop_type == "compound". Checking that the 'type:'
        # value is valid is done in _check_prop_by_type().
        #
        # 'compound' is a dummy type for properties that don't fit any of the
        # patterns above, so that we can require all entries in 'properties:'
        # to have a 'type: ...'. No Property object is created for it.
        return None
1697
1698    def _check_undeclared_props(self) -> None:
1699        # Checks that all properties are declared in the binding
1700        wl = {"compatible", "status", "ranges", "phandle",
1701              "interrupt-parent", "interrupts-extended", "device_type"}
1702
1703        for prop_name in self._node.props:
1704            # Allow a few special properties to not be declared in the binding
1705            if (prop_name.endswith("-controller")
1706                or prop_name.startswith("#")
1707                or prop_name in wl):
1708                continue
1709
1710            if TYPE_CHECKING:
1711                assert self._binding
1712
1713            if prop_name not in self._binding.prop2specs:
1714                _err(f"'{prop_name}' appears in {self._node.path} in "
1715                     f"{self.edt.dts_path}, but is not declared in "
1716                     f"'properties:' in {self.binding_path}")
1717
    def _init_ranges(self) -> None:
        # Initializes self.ranges from the 'ranges' property, which
        # describes how addresses on this node's child bus translate to
        # addresses on the parent bus. Each entry becomes a Range instance.
        node = self._node

        self.ranges = []

        if "ranges" not in node.props:
            return

        # The child address/size cell counts come from '#address-cells' and
        # '#size-cells' on this node; the parent address cell count comes
        # from the parent node (via _address_cells()).
        raw_child_address_cells = node.props.get("#address-cells")
        parent_address_cells = _address_cells(node)
        if raw_child_address_cells is None:
            child_address_cells = 2 # Default value per DT spec.
        else:
            child_address_cells = raw_child_address_cells.to_num()
        raw_child_size_cells = node.props.get("#size-cells")
        if raw_child_size_cells is None:
            child_size_cells = 1 # Default value per DT spec.
        else:
            child_size_cells = raw_child_size_cells.to_num()

        # Number of cells for one translation 3-tuple in 'ranges'
        entry_cells = child_address_cells + parent_address_cells + child_size_cells

        if entry_cells == 0:
            # With zero-sized entries only an empty 'ranges' makes sense,
            # since a non-empty value could not be split into entries.
            if len(node.props["ranges"].value) == 0:
                return
            else:
                _err(f"'ranges' should be empty in {self._node.path} since "
                     f"<#address-cells> = {child_address_cells}, "
                     f"<#address-cells for parent> = {parent_address_cells} and "
                     f"<#size-cells> = {child_size_cells}")

        # _slice() checks that the raw property value is an exact multiple
        # of the entry size (each cell is 4 bytes, hence the 4* factors)
        # and yields one raw entry at a time.
        for raw_range in _slice(node, "ranges", 4*entry_cells,
                                f"4*(<#address-cells> (= {child_address_cells}) + "
                                "<#address-cells for parent> "
                                f"(= {parent_address_cells}) + "
                                f"<#size-cells> (= {child_size_cells}))"):

            # A cell count of zero means the corresponding field does not
            # exist in the entry; use None instead of 0 to tell that apart
            # from a real zero value.
            child_bus_cells = child_address_cells
            if child_address_cells == 0:
                child_bus_addr = None
            else:
                child_bus_addr = to_num(raw_range[:4*child_address_cells])
            parent_bus_cells = parent_address_cells
            if parent_address_cells == 0:
                parent_bus_addr = None
            else:
                # The parent address follows the child address in the entry
                parent_bus_addr = to_num(
                    raw_range[(4*child_address_cells):
                              (4*child_address_cells + 4*parent_address_cells)])
            length_cells = child_size_cells
            if child_size_cells == 0:
                length = None
            else:
                # The length occupies the rest of the entry
                length = to_num(
                    raw_range[(4*child_address_cells + 4*parent_address_cells):])

            self.ranges.append(Range(self, child_bus_cells, child_bus_addr,
                                     parent_bus_cells, parent_bus_addr,
                                     length_cells, length))
1779
1780    def _init_regs(self) -> None:
1781        # Initializes self.regs
1782
1783        node = self._node
1784
1785        self.regs = []
1786
1787        if "reg" not in node.props:
1788            return
1789
1790        address_cells = _address_cells(node)
1791        size_cells = _size_cells(node)
1792
1793        for raw_reg in _slice(node, "reg", 4*(address_cells + size_cells),
1794                              f"4*(<#address-cells> (= {address_cells}) + "
1795                              f"<#size-cells> (= {size_cells}))"):
1796            if address_cells == 0:
1797                addr = None
1798            else:
1799                addr = _translate(to_num(raw_reg[:4*address_cells]), node)
1800            if size_cells == 0:
1801                size = None
1802            else:
1803                size = to_num(raw_reg[4*address_cells:])
1804            # Size zero is ok for PCI devices
1805            if size_cells != 0 and size == 0 and not self.is_pci_device:
1806                _err(f"zero-sized 'reg' in {self._node!r} seems meaningless "
1807                     "(maybe you want a size of one or #size-cells = 0 "
1808                     "instead)")
1809
1810            # We'll fix up the name when we're done.
1811            self.regs.append(Register(self, None, addr, size))
1812
1813        _add_names(node, "reg", self.regs)
1814
1815    def _init_pinctrls(self) -> None:
1816        # Initializes self.pinctrls from any pinctrl-<index> properties
1817
1818        node = self._node
1819
1820        # pinctrl-<index> properties
1821        pinctrl_props = [prop for name, prop in node.props.items()
1822                         if re.match("pinctrl-[0-9]+", name)]
1823        # Sort by index
1824        pinctrl_props.sort(key=lambda prop: prop.name)
1825
1826        # Check indices
1827        for i, prop in enumerate(pinctrl_props):
1828            if prop.name != "pinctrl-" + str(i):
1829                _err(f"missing 'pinctrl-{i}' property on {node!r} "
1830                     "- indices should be contiguous and start from zero")
1831
1832        self.pinctrls = []
1833        for prop in pinctrl_props:
1834            # We'll fix up the names below.
1835            self.pinctrls.append(PinCtrl(
1836                node=self,
1837                name=None,
1838                conf_nodes=[self.edt._node2enode[node]
1839                            for node in prop.to_nodes()]))
1840
1841        _add_names(node, "pinctrl", self.pinctrls)
1842
1843    def _init_interrupts(self) -> None:
1844        # Initializes self.interrupts
1845
1846        node = self._node
1847
1848        self.interrupts = []
1849
1850        for controller_node, data in _interrupts(node):
1851            # We'll fix up the names below.
1852            controller = self.edt._node2enode[controller_node]
1853            self.interrupts.append(ControllerAndData(
1854                node=self, controller=controller,
1855                data=self._named_cells(controller, data, "interrupt"),
1856                name=None, basename=None))
1857
1858        _add_names(node, "interrupt", self.interrupts)
1859
1860    def _standard_phandle_val_list(
1861            self,
1862            prop: dtlib_Property,
1863            specifier_space: Optional[str]
1864    ) -> list[Optional[ControllerAndData]]:
1865        # Parses a property like
1866        #
1867        #     <prop.name> = <phandle cell phandle cell ...>;
1868        #
1869        # where each phandle points to a controller node that has a
1870        #
1871        #     #<specifier_space>-cells = <size>;
1872        #
1873        # property that gives the number of cells in the value after the
1874        # controller's phandle in the property.
1875        #
1876        # E.g. with a property like
1877        #
1878        #     pwms = <&foo 1 2 &bar 3>;
1879        #
1880        # If 'specifier_space' is "pwm", then we should have this elsewhere
1881        # in the tree:
1882        #
1883        #     foo: ... {
1884        #             #pwm-cells = <2>;
1885        #     };
1886        #
1887        #     bar: ... {
1888        #             #pwm-cells = <1>;
1889        #     };
1890        #
1891        # These values can be given names using the <specifier_space>-names:
1892        # list in the binding for the phandle nodes.
1893        #
1894        # Also parses any
1895        #
1896        #     <specifier_space>-names = "...", "...", ...
1897        #
1898        # Returns a list of Optional[ControllerAndData] instances.
1899        #
1900        # An index is None if the underlying phandle-array element is
1901        # unspecified.
1902
1903        if not specifier_space:
1904            if prop.name.endswith("gpios"):
1905                # There's some slight special-casing for *-gpios properties in that
1906                # e.g. foo-gpios still maps to #gpio-cells rather than
1907                # #foo-gpio-cells
1908                specifier_space = "gpio"
1909            else:
1910                # Strip -s. We've already checked that property names end in -s
1911                # if there is no specifier space in _check_prop_by_type().
1912                specifier_space = prop.name[:-1]
1913
1914        res: list[Optional[ControllerAndData]] = []
1915
1916        for item in _phandle_val_list(prop, specifier_space):
1917            if item is None:
1918                res.append(None)
1919                continue
1920
1921            controller_node, data = item
1922            mapped_controller, mapped_data = (
1923                _map_phandle_array_entry(prop.node, controller_node,
1924                                         data, specifier_space))
1925
1926            controller = self.edt._node2enode[mapped_controller]
1927            # We'll fix up the names below.
1928            res.append(ControllerAndData(
1929                node=self, controller=controller,
1930                data=self._named_cells(controller, mapped_data,
1931                                       specifier_space),
1932                name=None, basename=specifier_space))
1933
1934        _add_names(self._node, specifier_space, res)
1935
1936        return res
1937
1938    def _named_cells(
1939            self,
1940            controller: 'Node',
1941            data: bytes,
1942            basename: str
1943    ) -> dict[str, int]:
1944        # Returns a dictionary that maps <basename>-cells names given in the
1945        # binding for 'controller' to cell values. 'data' is the raw data, as a
1946        # byte array.
1947
1948        if not controller._binding:
1949            _err(f"{basename} controller {controller._node!r} "
1950                 f"for {self._node!r} lacks binding")
1951
1952        if basename in controller._binding.specifier2cells:
1953            cell_names: list[str] = controller._binding.specifier2cells[basename]
1954        else:
1955            # Treat no *-cells in the binding the same as an empty *-cells, so
1956            # that bindings don't have to have e.g. an empty 'clock-cells:' for
1957            # '#clock-cells = <0>'.
1958            cell_names = []
1959
1960        data_list = to_nums(data)
1961        if len(data_list) != len(cell_names):
1962            _err(f"unexpected '{basename}-cells:' length in binding for "
1963                 f"{controller._node!r} - {len(cell_names)} "
1964                 f"instead of {len(data_list)}")
1965
1966        return dict(zip(cell_names, data_list, strict=False))
1967
1968
1969class EDT:
1970    """
1971    Represents a devicetree augmented with information from bindings.
1972
1973    These attributes are available on EDT objects:
1974
1975    nodes:
1976      A list of Node objects for the nodes that appear in the devicetree
1977
1978    compat2nodes:
1979      A collections.defaultdict that maps each 'compatible' string that appears
1980      on some Node to a list of Nodes with that compatible.
1981      The collection is sorted so that enabled nodes appear first in the
1982      collection.
1983
1984    compat2okay:
1985      Like compat2nodes, but just for nodes with status 'okay'.
1986
1987    compat2notokay:
1988      Like compat2nodes, but just for nodes with status not 'okay'.
1989
1990    compat2vendor:
1991      A collections.defaultdict that maps each 'compatible' string that appears
1992      on some Node to a vendor name parsed from vendor_prefixes.
1993
1994    compat2model:
1995      A collections.defaultdict that maps each 'compatible' string that appears
1996      on some Node to a model name parsed from that compatible.
1997
1998    label2node:
1999      A dict that maps a node label to the node with that label.
2000
2001    dep_ord2node:
2002      A dict that maps an ordinal to the node with that dependency ordinal.
2003
2004    chosen_nodes:
2005      A dict that maps the properties defined on the devicetree's /chosen
2006      node to their values. 'chosen' is indexed by property name (a string),
2007      and values are converted to Node objects. Note that properties of the
2008      /chosen node which can't be converted to a Node are not included in
2009      the value.
2010
2011    dts_path:
2012      The .dts path passed to __init__()
2013
2014    dts_source:
2015      The final DTS source code of the loaded devicetree after merging nodes
2016      and processing /delete-node/ and /delete-property/, as a string
2017
2018    bindings_dirs:
2019      The bindings directory paths passed to __init__()
2020
2021    scc_order:
2022      A list of lists of Nodes. All elements of each list
2023      depend on each other, and the Nodes in any list do not depend
2024      on any Node in a subsequent list. Each list defines a Strongly
2025      Connected Component (SCC) of the graph.
2026
2027      For an acyclic graph each list will be a singleton. Cycles
2028      will be represented by lists with multiple nodes. Cycles are
2029      not expected to be present in devicetree graphs.
2030
2031    The standard library's pickle module can be used to marshal and
2032    unmarshal EDT objects.
2033    """
2034
    def __init__(self,
                 dts: Optional[str],
                 bindings_dirs: list[str],
                 workspace_dir: Optional[str] = None,
                 warn_reg_unit_address_mismatch: bool = True,
                 default_prop_types: bool = True,
                 support_fixed_partitions_on_any_bus: bool = True,
                 infer_binding_for_paths: Optional[Iterable[str]] = None,
                 vendor_prefixes: Optional[dict[str, str]] = None,
                 werror: bool = False,
                 warn_bus_mismatch: bool = False):
        """EDT constructor.

        dts:
          Path to devicetree .dts file. Passing None for this value
          is only for internal use; do not do that outside of edtlib.

        bindings_dirs:
          List of paths to directories containing bindings, in YAML format.
          These directories are recursively searched for .yaml files.

        workspace_dir:
          Path to the root of the Zephyr workspace. This is used as a base
          directory for relative paths in the generated devicetree comments.

        warn_reg_unit_address_mismatch (default: True):
          If True, a warning is logged if a node has a 'reg' property where
          the address of the first entry does not match the unit address of the
          node

        default_prop_types (default: True):
          If True, default property types will be used when a node has no
          bindings.

        support_fixed_partitions_on_any_bus (default True):
          If True, set the Node.bus for 'fixed-partitions' compatible nodes
          to None.  This allows 'fixed-partitions' binding to match regardless
          of the bus the 'fixed-partition' is under.

        infer_binding_for_paths (default: None):
          An iterable of devicetree paths identifying nodes for which bindings
          should be inferred from the node content.  (Child nodes are not
          processed.)  Pass None if no nodes should support inferred bindings.

        vendor_prefixes (default: None):
          A dict mapping vendor prefixes in compatible properties to their
          descriptions. If given, compatibles in the form "manufacturer,device"
          for which "manufacturer" is neither a key in the dict nor a specially
          exempt set of legacy cases will cause warnings.

        werror (default: False):
          If True, some edtlib specific warnings become errors. This currently
          errors out if 'dts' has any deprecated properties set, or an unknown
          vendor prefix is used.

        warn_bus_mismatch (default: False):
          If True, a warning is logged if a node's actual bus does not match
          the bus specified in its binding.
        """
        # All instance attributes should be initialized here.
        # This makes it easy to keep track of them, which makes
        # implementing __deepcopy__() easier.
        # If you change this, make sure to update __deepcopy__() too,
        # and update the tests for that method.

        # Public attributes (the rest are properties)
        self.nodes: list[Node] = []
        self.compat2nodes: dict[str, list[Node]] = defaultdict(list)
        self.compat2okay: dict[str, list[Node]] = defaultdict(list)
        self.compat2notokay: dict[str, list[Node]] = defaultdict(list)
        self.compat2vendor: dict[str, str] = defaultdict(str)
        self.compat2model: dict[str, str]  = defaultdict(str)
        self.label2node: dict[str, Node] = {}
        self.dep_ord2node: dict[int, Node] = {}
        self.dts_path: str = dts # type: ignore
        self.bindings_dirs: list[str] = list(bindings_dirs)

        # Saved kwarg values for internal use
        self._warn_reg_unit_address_mismatch: bool = warn_reg_unit_address_mismatch
        self._default_prop_types: bool = default_prop_types
        self._fixed_partitions_no_bus: bool = support_fixed_partitions_on_any_bus
        self._infer_binding_for_paths: set[str] = set(infer_binding_for_paths or [])
        self._vendor_prefixes: dict[str, str] = vendor_prefixes or {}
        self._werror: bool = bool(werror)
        self._warn_bus_mismatch: bool = warn_bus_mismatch

        # Other internal state
        self._compat2binding: dict[tuple[str, Optional[str]], Binding] = {}
        self._graph: Graph = Graph()
        self._binding_paths: list[str] = _binding_paths(self.bindings_dirs)
        self._binding_fname2path: dict[str, str] = {
            os.path.basename(path): path
            for path in self._binding_paths
        }
        self._node2enode: dict[dtlib_Node, Node] = {}

        # 'dts' is None only when __deepcopy__() builds an empty shell and
        # fills in the devicetree state itself (see _finish_init()).
        if dts is not None:
            try:
                self._dt = DT(dts, base_dir=workspace_dir)
            except DTError as e:
                raise EDTError(e) from e
            self._finish_init()
2137
    def _finish_init(self) -> None:
        # Completes initialization from self._dt. Factored out of __init__()
        # so that __deepcopy__() can stay in sync with it: the copy assigns
        # a deep-copied self._dt directly and then calls this method instead
        # of re-parsing the DTS file.
        _check_dt(self._dt)

        self._init_compat2binding()
        # _init_nodes() populates self.nodes, which _init_graph() and
        # _init_luts() iterate over, so the order here matters.
        self._init_nodes()
        self._init_graph()
        self._init_luts()

        self._check()
2149
2150    def get_node(self, path: str) -> Node:
2151        """
2152        Returns the Node at the DT path or alias 'path'. Raises EDTError if the
2153        path or alias doesn't exist.
2154        """
2155        try:
2156            return self._node2enode[self._dt.get_node(path)]
2157        except DTError as e:
2158            _err(e)
2159
2160    @property
2161    def chosen_nodes(self) -> dict[str, Node]:
2162        ret: dict[str, Node] = {}
2163
2164        try:
2165            chosen = self._dt.get_node("/chosen")
2166        except DTError:
2167            return ret
2168
2169        for name, prop in chosen.props.items():
2170            try:
2171                node = prop.to_path()
2172            except DTError:
2173                # DTS value is not phandle or string, or path doesn't exist
2174                continue
2175
2176            ret[name] = self._node2enode[node]
2177
2178        return ret
2179
2180    def chosen_node(self, name: str) -> Optional[Node]:
2181        """
2182        Returns the Node pointed at by the property named 'name' in /chosen, or
2183        None if the property is missing
2184        """
2185        return self.chosen_nodes.get(name)
2186
2187    @property
2188    def dts_source(self) -> str:
2189        return f"{self._dt}"
2190
2191    def __repr__(self) -> str:
2192        return (f"<EDT for '{self.dts_path}', binding directories "
2193                f"'{self.bindings_dirs}'>")
2194
2195    def __deepcopy__(self, memo) -> 'EDT':
2196        """
2197        Implements support for the standard library copy.deepcopy()
2198        function on EDT instances.
2199        """
2200
2201        ret = EDT(
2202            None,
2203            self.bindings_dirs,
2204            warn_reg_unit_address_mismatch=self._warn_reg_unit_address_mismatch,
2205            default_prop_types=self._default_prop_types,
2206            support_fixed_partitions_on_any_bus=self._fixed_partitions_no_bus,
2207            infer_binding_for_paths=set(self._infer_binding_for_paths),
2208            vendor_prefixes=dict(self._vendor_prefixes),
2209            werror=self._werror
2210        )
2211        ret.dts_path = self.dts_path
2212        ret._dt = deepcopy(self._dt, memo)
2213        ret._finish_init()
2214        return ret
2215
2216    @property
2217    def scc_order(self) -> list[list[Node]]:
2218        try:
2219            return self._graph.scc_order()
2220        except Exception as e:
2221            raise EDTError(e) from None
2222
2223    def _process_properties_r(self, root_node: Node, props_node: Node) -> None:
2224        """
2225        Process props_node properties for dependencies, and add those as
2226        dependencies of root_node. Then walk through all the props_node
2227        children and do the same recursively, maintaining the same root_node.
2228
2229        This ensures that on a node with child nodes, the parent node includes
2230        the dependencies of all the child nodes as well as its own.
2231        """
2232        # A Node depends on any Nodes present in 'phandle',
2233        # 'phandles', or 'phandle-array' property values.
2234        for prop in props_node.props.values():
2235            if prop.type == 'phandle':
2236                self._graph.add_edge(root_node, prop.val)
2237            elif prop.type == 'phandles':
2238                if TYPE_CHECKING:
2239                    assert isinstance(prop.val, list)
2240                for phandle_node in prop.val:
2241                    self._graph.add_edge(root_node, phandle_node)
2242            elif prop.type == 'phandle-array':
2243                if TYPE_CHECKING:
2244                    assert isinstance(prop.val, list)
2245                for cd in prop.val:
2246                    if cd is None:
2247                        continue
2248                    if TYPE_CHECKING:
2249                        assert isinstance(cd, ControllerAndData)
2250                    self._graph.add_edge(root_node, cd.controller)
2251
2252        # A Node depends on whatever supports the interrupts it
2253        # generates.
2254        for intr in props_node.interrupts:
2255            self._graph.add_edge(root_node, intr.controller)
2256
2257        # If the binding defines child bindings, link the child properties to
2258        # the root_node as well.
2259        if props_node.has_child_binding:
2260            for child in props_node.children.values():
2261                if "compatible" in child.props:
2262                    # Not a child node, normal node on a different binding.
2263                    continue
2264                self._process_properties_r(root_node, child)
2265
    def _process_properties(self, node: Node) -> None:
        """
        Add node dependencies based on own as well as child node properties,
        start from the node itself.
        """
        # 'node' serves as both the dependency root and the traversal start
        self._process_properties_r(node, node)
2272
2273    def _init_graph(self) -> None:
2274        # Constructs a graph of dependencies between Node instances,
2275        # which is usable for computing a partial order over the dependencies.
2276        # The algorithm supports detecting dependency loops.
2277        #
2278        # Actually computing the SCC order is lazily deferred to the
2279        # first time the scc_order property is read.
2280
2281        for node in self.nodes:
2282            # Always insert root node
2283            if not node.parent:
2284                self._graph.add_node(node)
2285
2286            # A Node always depends on its parent.
2287            for child in node.children.values():
2288                self._graph.add_edge(child, node)
2289
2290            self._process_properties(node)
2291
2292    def _init_compat2binding(self) -> None:
2293        # Creates self._compat2binding, a dictionary that maps
2294        # (<compatible>, <bus>) tuples (both strings) to Binding objects.
2295        #
2296        # The Binding objects are created from YAML files discovered
2297        # in self.bindings_dirs as needed.
2298        #
2299        # For example, self._compat2binding["company,dev", "can"]
2300        # contains the Binding for the 'company,dev' device, when it
2301        # appears on the CAN bus.
2302        #
2303        # For bindings that don't specify a bus, <bus> is None, so that e.g.
2304        # self._compat2binding["company,notonbus", None] is the Binding.
2305        #
2306        # Only bindings for 'compatible' strings that appear in the devicetree
2307        # are loaded.
2308
2309        dt_compats = _dt_compats(self._dt)
2310        # Searches for any 'compatible' string mentioned in the devicetree
2311        # files, with a regex
2312        dt_compats_search = re.compile(
2313            "|".join(re.escape(compat) for compat in dt_compats)
2314        ).search
2315
2316        for binding_path in self._binding_paths:
2317            with open(binding_path, encoding="utf-8") as f:
2318                contents = f.read()
2319
2320            # As an optimization, skip parsing files that don't contain any of
2321            # the .dts 'compatible' strings, which should be reasonably safe
2322            if not dt_compats_search(contents):
2323                continue
2324
2325            # Load the binding and check that it actually matches one of the
2326            # compatibles. Might get false positives above due to comments and
2327            # stuff.
2328
2329            try:
2330                # Parsed PyYAML output (Python lists/dictionaries/strings/etc.,
2331                # representing the file)
2332                raw = yaml.load(contents, Loader=_BindingLoader)
2333            except yaml.YAMLError as e:
2334                _err(
2335                        f"'{binding_path}' appears in binding directories "
2336                        f"but isn't valid YAML: {e}")
2337
2338            # Convert the raw data to a Binding object, erroring out
2339            # if necessary.
2340            binding = self._binding(raw, binding_path, dt_compats)
2341
2342            # Register the binding in self._compat2binding, along with
2343            # any child bindings that have their own compatibles.
2344            while binding is not None:
2345                if binding.compatible:
2346                    self._register_binding(binding)
2347                binding = binding.child_binding
2348
2349    def _binding(self,
2350                 raw: Optional[dict],
2351                 binding_path: str,
2352                 dt_compats: set[str]) -> Optional[Binding]:
2353        # Convert a 'raw' binding from YAML to a Binding object and return it.
2354        #
2355        # Error out if the raw data looks like an invalid binding.
2356        #
2357        # Return None if the file doesn't contain a binding or the
2358        # binding's compatible isn't in dt_compats.
2359
2360        # Get the 'compatible:' string.
2361        if raw is None or "compatible" not in raw:
2362            # Empty file, binding fragment, spurious file, etc.
2363            return None
2364
2365        compatible = raw["compatible"]
2366
2367        if compatible not in dt_compats:
2368            # Not a compatible we care about.
2369            return None
2370
2371        # Initialize and return the Binding object.
2372        return Binding(binding_path, self._binding_fname2path, raw=raw)
2373
2374    def _register_binding(self, binding: Binding) -> None:
2375        # Do not allow two different bindings to have the same
2376        # 'compatible:'/'on-bus:' combo
2377        if TYPE_CHECKING:
2378            assert binding.compatible
2379        old_binding = self._compat2binding.get((binding.compatible,
2380                                                binding.on_bus))
2381        if old_binding:
2382            msg = (f"both {old_binding.path} and {binding.path} have "
2383                   f"'compatible: {binding.compatible}'")
2384            if binding.on_bus is not None:
2385                msg += f" and 'on-bus: {binding.on_bus}'"
2386            _err(msg)
2387
2388        # Register the binding.
2389        self._compat2binding[binding.compatible, binding.on_bus] = binding
2390
2391    def _init_nodes(self) -> None:
2392        # Creates a list of edtlib.Node objects from the dtlib.Node objects, in
2393        # self.nodes
2394
2395        hash2node: dict[str, Node] = {}
2396
2397        for dt_node in self._dt.node_iter():
2398            # Warning: We depend on parent Nodes being created before their
2399            # children. This is guaranteed by node_iter().
2400            node = Node(dt_node, self, self._fixed_partitions_no_bus)
2401
2402            if node.hash in hash2node:
2403                _err(f"hash collision between '{node.path}' and "
2404                     f"'{hash2node[node.hash].path}'")
2405            hash2node[node.hash] = node
2406
2407            self.nodes.append(node)
2408            self._node2enode[dt_node] = node
2409
2410        for node in self.nodes:
2411            # Initialize properties that may depend on other Node objects having
2412            # been created, because they (either always or sometimes) reference
2413            # other nodes. Must be called separately after all nodes have been
2414            # created.
2415            node._init_crossrefs(
2416                default_prop_types=self._default_prop_types,
2417                err_on_deprecated=self._werror,
2418            )
2419
2420        if self._warn_reg_unit_address_mismatch:
2421            # This warning matches the simple_bus_reg warning in dtc
2422            for node in self.nodes:
2423                # Address mismatch is ok for PCI devices
2424                if (node.regs and node.regs[0].addr != node.unit_addr and
2425                        not node.is_pci_device):
2426                    _LOG.warning("unit address and first address in 'reg' "
2427                                 f"(0x{node.regs[0].addr:x}) don't match for "
2428                                 f"{node.path}")
2429
2430    def _init_luts(self) -> None:
2431        # Initialize node lookup tables (LUTs).
2432
2433        for node in self.nodes:
2434            for label in node.labels:
2435                self.label2node[label] = node
2436
2437            for compat in node.compats:
2438                if node.status == "okay":
2439                    self.compat2okay[compat].append(node)
2440                else:
2441                    self.compat2notokay[compat].append(node)
2442
2443                if compat in self.compat2vendor:
2444                    continue
2445
2446                # The regular expression comes from dt-schema.
2447                compat_re = r'^[a-zA-Z][a-zA-Z0-9,+\-._]+$'
2448                if not re.match(compat_re, compat):
2449                    _err(f"node '{node.path}' compatible '{compat}' "
2450                         'must match this regular expression: '
2451                         f"'{compat_re}'")
2452
2453                if ',' in compat and self._vendor_prefixes:
2454                    vendor, model = compat.split(',', 1)
2455                    if vendor in self._vendor_prefixes:
2456                        self.compat2vendor[compat] = self._vendor_prefixes[vendor]
2457                        self.compat2model[compat] = model
2458
2459                    # As an exception, the root node can have whatever
2460                    # compatibles it wants. Other nodes get checked.
2461                    elif node.path != '/':
2462                        if self._werror:
2463                            handler_fn: Any = _err
2464                        else:
2465                            handler_fn = _LOG.warning
2466                        handler_fn(
2467                            f"node '{node.path}' compatible '{compat}' "
2468                            f"has unknown vendor prefix '{vendor}'")
2469
2470        for compat, nodes in self.compat2okay.items():
2471            self.compat2nodes[compat].extend(nodes)
2472
2473        for compat, nodes in self.compat2notokay.items():
2474            self.compat2nodes[compat].extend(nodes)
2475
2476        for nodeset in self.scc_order:
2477            node = nodeset[0]
2478            self.dep_ord2node[node.dep_ordinal] = node
2479
2480    def _check(self) -> None:
2481        # Tree-wide checks and warnings.
2482
2483        for binding in self._compat2binding.values():
2484            for spec in binding.prop2specs.values():
2485                if not spec.enum or spec.type != 'string':
2486                    continue
2487
2488                if not spec.enum_tokenizable:
2489                    _LOG.warning(
2490                        f"compatible '{binding.compatible}' "
2491                        f"in binding '{binding.path}' has non-tokenizable enum "
2492                        f"for property '{spec.name}': " +
2493                        ', '.join(repr(x) for x in spec.enum))
2494                elif not spec.enum_upper_tokenizable:
2495                    _LOG.warning(
2496                        f"compatible '{binding.compatible}' "
2497                        f"in binding '{binding.path}' has enum for property "
2498                        f"'{spec.name}' that is only tokenizable "
2499                        'in lowercase: ' +
2500                        ', '.join(repr(x) for x in spec.enum))
2501
2502        # Validate the contents of compatible properties.
2503        for node in self.nodes:
2504            if 'compatible' not in node.props:
2505                continue
2506
2507            compatibles = node.props['compatible'].val
2508
2509            # _check() runs after _init_compat2binding() has called
2510            # _dt_compats(), which already converted every compatible
2511            # property to a list of strings. So we know 'compatibles'
2512            # is a list, but add an assert for future-proofing.
2513            assert isinstance(compatibles, list)
2514
2515            for compat in compatibles:
2516                # This is also just for future-proofing.
2517                assert isinstance(compat, str)
2518
2519
def bindings_from_paths(yaml_paths: list[str],
                        ignore_errors: bool = False) -> list[Binding]:
    """
    Get a list of Binding objects from the yaml files 'yaml_paths'.

    If 'ignore_errors' is True, YAML files that cause an EDTError when
    loaded are ignored. (No other exception types are silenced.)
    """

    fname2path = {os.path.basename(path): path for path in yaml_paths}

    bindings = []
    for path in yaml_paths:
        try:
            binding = Binding(path, fname2path)
        except EDTError:
            if not ignore_errors:
                raise
        else:
            bindings.append(binding)

    return bindings
2540
2541
class EDTError(Exception):
    """Exception raised for devicetree- and binding-related errors."""
2544
2545#
2546# Public global functions
2547#
2548
2549
def load_vendor_prefixes_txt(vendor_prefixes: str) -> dict[str, str]:
    """Load a vendor-prefixes.txt file, returning a dict that maps each
    vendor prefix to the vendor's name.
    """
    result: dict[str, str] = {}
    with open(vendor_prefixes, encoding='utf-8') as f:
        for raw_line in f:
            stripped = raw_line.strip()

            # Skip blank lines and '#' comments.
            if not stripped or stripped.startswith('#'):
                continue

            # Every remaining line has the form:
            #
            # <vnd><TAB><vendor>
            fields = stripped.split('\t', 1)
            assert len(fields) == 2, stripped
            vnd, vendor = fields
            result[vnd] = vendor
    return result
2570
2571#
2572# Private global functions
2573#
2574
2575
def _dt_compats(dt: DT) -> set[str]:
    # Collects every 'compatible' string appearing anywhere in the
    # devicetree 'dt' (a dtlib.DT instance) into a set.

    compats: set[str] = set()
    for node in dt.node_iter():
        prop = node.props.get("compatible")
        if prop is not None:
            compats.update(prop.to_strings())
    return compats
2584
2585
2586def _binding_paths(bindings_dirs: list[str]) -> list[str]:
2587    # Returns a list with the paths to all bindings (.yaml files) in
2588    # 'bindings_dirs'
2589
2590    return [os.path.join(root, filename)
2591            for bindings_dir in bindings_dirs
2592            for root, _, filenames in os.walk(bindings_dir)
2593            for filename in filenames
2594            if filename.endswith((".yaml", ".yml"))]
2595
2596
def _binding_inc_error(msg):
    # Helper for reporting errors in the !include implementation.
    # Raises rather than returning.

    raise yaml.constructor.ConstructorError(None, None, f"error: {msg}")
2601
2602
2603def _check_include_dict(name: Optional[str],
2604                        allowlist: Optional[list[str]],
2605                        blocklist: Optional[list[str]],
2606                        child_filter: Optional[dict],
2607                        binding_path: Optional[str]) -> None:
2608    # Check that an 'include:' named 'name' with property-allowlist
2609    # 'allowlist', property-blocklist 'blocklist', and
2610    # child-binding filter 'child_filter' has valid structure.
2611
2612    if name is None:
2613        _err(f"'include:' element in {binding_path} "
2614             "should have a 'name' key")
2615
2616    if allowlist is not None and blocklist is not None:
2617        _err(f"'include:' of file '{name}' in {binding_path} "
2618             "should not specify both 'property-allowlist:' "
2619             "and 'property-blocklist:'")
2620
2621    while child_filter is not None:
2622        child_copy = deepcopy(child_filter)
2623        child_allowlist: Optional[list[str]] = (
2624            child_copy.pop('property-allowlist', None))
2625        child_blocklist: Optional[list[str]] = (
2626            child_copy.pop('property-blocklist', None))
2627        next_child_filter: Optional[dict] = (
2628            child_copy.pop('child-binding', None))
2629
2630        if child_copy:
2631            # We've popped out all the valid keys.
2632            _err(f"'include:' of file '{name}' in {binding_path} "
2633                 "should not have these unexpected contents in a "
2634                 f"'child-binding': {child_copy}")
2635
2636        if child_allowlist is not None and child_blocklist is not None:
2637            _err(f"'include:' of file '{name}' in {binding_path} "
2638                 "should not specify both 'property-allowlist:' and "
2639                 "'property-blocklist:' in a 'child-binding:'")
2640
2641        child_filter = next_child_filter
2642
2643
def _filter_properties(raw: dict,
                       allowlist: Optional[list[str]],
                       blocklist: Optional[list[str]],
                       child_filter: Optional[dict],
                       binding_path: Optional[str]) -> None:
    # Destructively prunes 'raw["properties"]' and (recursively)
    # 'raw["child-binding"]', if they exist, according to 'allowlist',
    # 'blocklist', and 'child_filter'.

    _filter_properties_helper(raw.get('properties'), allowlist, blocklist,
                              binding_path)

    # Descend the parallel 'child-binding' chains of the filter and the
    # binding, filtering each level.
    child_binding = raw.get('child-binding')
    filt = child_filter
    while filt is not None and child_binding is not None:
        _filter_properties_helper(child_binding.get('properties'),
                                  filt.get('property-allowlist'),
                                  filt.get('property-blocklist'),
                                  binding_path)
        filt = filt.get('child-binding')
        child_binding = child_binding.get('child-binding')
2664
2665
2666def _filter_properties_helper(props: Optional[dict],
2667                              allowlist: Optional[list[str]],
2668                              blocklist: Optional[list[str]],
2669                              binding_path: Optional[str]) -> None:
2670    if props is None or (allowlist is None and blocklist is None):
2671        return
2672
2673    _check_prop_filter('property-allowlist', allowlist, binding_path)
2674    _check_prop_filter('property-blocklist', blocklist, binding_path)
2675
2676    if allowlist is not None:
2677        allowset = set(allowlist)
2678        to_del = [prop for prop in props if prop not in allowset]
2679    else:
2680        if TYPE_CHECKING:
2681            assert blocklist
2682        blockset = set(blocklist)
2683        to_del = [prop for prop in props if prop in blockset]
2684
2685    for prop in to_del:
2686        del props[prop]
2687
2688
2689def _check_prop_filter(name: str, value: Optional[list[str]],
2690                       binding_path: Optional[str]) -> None:
2691    # Ensure an include: ... property-allowlist or property-blocklist
2692    # is a list.
2693
2694    if value is None:
2695        return
2696
2697    if not isinstance(value, list):
2698        _err(f"'{name}' value {value} in {binding_path} should be a list")
2699
2700
2701def _merge_props(to_dict: dict,
2702                 from_dict: dict,
2703                 parent: Optional[str],
2704                 binding_path: Optional[str],
2705                 check_required: bool = False):
2706    # Recursively merges 'from_dict' into 'to_dict', to implement 'include:'.
2707    #
2708    # If 'from_dict' and 'to_dict' contain a 'required:' key for the same
2709    # property, then the values are ORed together.
2710    #
2711    # If 'check_required' is True, then an error is raised if 'from_dict' has
2712    # 'required: true' while 'to_dict' has 'required: false'. This prevents
2713    # bindings from "downgrading" requirements from bindings they include,
2714    # which might help keep bindings well-organized.
2715    #
2716    # It's an error for most other keys to appear in both 'from_dict' and
2717    # 'to_dict'. When it's not an error, the value in 'to_dict' takes
2718    # precedence.
2719    #
2720    # 'parent' is the name of the parent key containing 'to_dict' and
2721    # 'from_dict', and 'binding_path' is the path to the top-level binding.
2722    # These are used to generate errors for sketchy property overwrites.
2723
2724    for prop in from_dict:
2725        if (isinstance(to_dict.get(prop), dict)
2726            and isinstance(from_dict[prop], dict)):
2727            _merge_props(to_dict[prop], from_dict[prop], prop, binding_path,
2728                         check_required)
2729        elif prop not in to_dict:
2730            to_dict[prop] = from_dict[prop]
2731        elif _bad_overwrite(to_dict, from_dict, prop, check_required):
2732            _err(f"{binding_path} (in '{parent}'): '{prop}' "
2733                 f"from included file overwritten ('{from_dict[prop]}' "
2734                 f"replaced with '{to_dict[prop]}')")
2735        elif prop == "required":
2736            # Need a separate check here, because this code runs before
2737            # Binding._check()
2738            if not (isinstance(from_dict["required"], bool) and
2739                    isinstance(to_dict["required"], bool)):
2740                _err(f"malformed 'required:' setting for '{parent}' in "
2741                     f"'properties' in {binding_path}, expected true/false")
2742
2743            # 'required: true' takes precedence
2744            to_dict["required"] = to_dict["required"] or from_dict["required"]
2745
2746
2747def _bad_overwrite(to_dict: dict, from_dict: dict, prop: str,
2748                   check_required: bool) -> bool:
2749    # _merge_props() helper. Returns True in cases where it's bad that
2750    # to_dict[prop] takes precedence over from_dict[prop].
2751
2752    if to_dict[prop] == from_dict[prop]:
2753        return False
2754
2755    # These are overridden deliberately
2756    if prop in {"title", "description", "compatible"}:
2757        return False
2758
2759    if prop == "required":
2760        if not check_required:
2761            return False
2762        return from_dict[prop] and not to_dict[prop]
2763
2764    return True
2765
2766
def _binding_include(loader, node):
    # Implements the legacy '!include' YAML tag, kept for backwards
    # compatibility. '!include [foo, bar]' simply becomes [foo, bar].

    if isinstance(node, yaml.ScalarNode):
        # !include foo.yaml
        result = [loader.construct_scalar(node)]
    elif isinstance(node, yaml.SequenceNode):
        # !include [foo.yaml, bar.yaml]
        result = loader.construct_sequence(node)
    else:
        _binding_inc_error("unrecognised node type in !include statement")
    return result
2780
2781
2782def _check_prop_by_type(prop_name: str,
2783                        options: dict,
2784                        binding_path: Optional[str]) -> None:
2785    # Binding._check_properties() helper. Checks 'type:', 'default:',
2786    # 'const:' and # 'specifier-space:' for the property named 'prop_name'
2787
2788    prop_type = options.get("type")
2789    default = options.get("default")
2790    const = options.get("const")
2791
2792    if prop_type is None:
2793        _err(f"missing 'type:' for '{prop_name}' in 'properties' in "
2794             f"{binding_path}")
2795
2796    ok_types = {"boolean", "int", "array", "uint8-array", "string",
2797                "string-array", "phandle", "phandles", "phandle-array",
2798                "path", "compound"}
2799
2800    if prop_type not in ok_types:
2801        _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2802             f"has unknown type '{prop_type}', expected one of " +
2803             ", ".join(ok_types))
2804
2805    if "specifier-space" in options and prop_type != "phandle-array":
2806        _err(f"'specifier-space' in 'properties: {prop_name}' "
2807             f"has type '{prop_type}', expected 'phandle-array'")
2808
2809    if (prop_type == "phandle-array"
2810        and not prop_name.endswith("s")
2811        and "specifier-space" not in options):
2812        _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2813             f"has type 'phandle-array' and its name does not end in 's', "
2814             f"but no 'specifier-space' was provided.")
2815
2816    # If you change const_types, be sure to update the type annotation
2817    # for PropertySpec.const.
2818    const_types = {"int", "array", "uint8-array", "string", "string-array"}
2819    if const and prop_type not in const_types:
2820        _err(f"const in {binding_path} for property '{prop_name}' "
2821             f"has type '{prop_type}', expected one of " +
2822             ", ".join(const_types))
2823
2824    # Check default
2825
2826    if default is None:
2827        return
2828
2829    if prop_type in {"boolean", "compound", "phandle", "phandles",
2830                     "phandle-array", "path"}:
2831        _err("'default:' can't be combined with "
2832             f"'type: {prop_type}' for '{prop_name}' in "
2833             f"'properties:' in {binding_path}")
2834
2835    def ok_default() -> bool:
2836        # Returns True if 'default' is an okay default for the property's type.
2837        # If you change this, be sure to update the type annotation for
2838        # PropertySpec.default.
2839
2840        if (prop_type == "int" and isinstance(default, int)
2841            or prop_type == "string" and isinstance(default, str)):
2842            return True
2843
2844        # array, uint8-array, or string-array
2845
2846        if not isinstance(default, list):
2847            return False
2848
2849        if (prop_type == "array"
2850            and all(isinstance(val, int) for val in default)):
2851            return True
2852
2853        if (prop_type == "uint8-array"
2854            and all(isinstance(val, int)
2855                    and 0 <= val <= 255 for val in default)):
2856            return True
2857
2858        # string-array
2859        return all(isinstance(val, str) for val in default)
2860
2861    if not ok_default():
2862        _err(f"'default: {default}' is invalid for '{prop_name}' "
2863             f"in 'properties:' in {binding_path}, "
2864             f"which has type {prop_type}")
2865
2866
def _translate(addr: int, node: dtlib_Node) -> int:
    # Maps 'addr' on 'node' into the address space(s) of its parent(s) by
    # recursively applying 'ranges' properties. Returns the translated
    # address.

    parent = node.parent
    if not parent or "ranges" not in parent.props:
        # Nothing to translate through
        return addr

    if not parent.props["ranges"].value:
        # Per the DT spec., an <empty> 'ranges' value means the parent and
        # child address spaces are identical and no translation is needed.
        # Treat it like an explicit one-to-one mapping (rather than like no
        # translation at all) and keep going up the tree.
        return _translate(addr, parent)

    # Cell counts for the three components of one 'ranges' entry
    child_address_cells = _address_cells(node)
    parent_address_cells = _address_cells(parent)
    child_size_cells = _size_cells(node)

    # Total number of cells in one translation 3-tuple in 'ranges'
    entry_cells = child_address_cells + parent_address_cells + child_size_cells

    for raw_range in _slice(parent, "ranges", 4*entry_cells,
                            f"4*(<#address-cells> (= {child_address_cells}) + "
                            "<#address-cells for parent> "
                            f"(= {parent_address_cells}) + "
                            f"<#size-cells> (= {child_size_cells}))"):
        # Decode the entry as (child address, parent address, length)
        child_addr = to_num(raw_range[:4*child_address_cells])
        rest = raw_range[4*child_address_cells:]
        parent_addr = to_num(rest[:4*parent_address_cells])
        child_len = to_num(rest[4*parent_address_cells:])

        if child_addr <= addr < child_addr + child_len:
            # 'addr' falls inside this entry: translate it into the
            # parent's space and recurse upwards.
            return _translate(parent_addr + addr - child_addr, parent)

    # 'addr' is not covered by any translation in 'ranges'
    return addr
2913
2914
def _add_names(node: dtlib_Node, names_ident: str, objs: Any) -> None:
    # Assigns names from 'node's '<names_ident>-names' property (e.g.
    # "reg-names" when names_ident is "reg") to the .name fields of the
    # objects in 'objs'. When the property is absent, every object's
    # .name is set to None instead. None entries in 'objs' are skipped.

    full_names_ident = names_ident + "-names"

    if full_names_ident not in node.props:
        for obj in objs:
            if obj is not None:
                obj.name = None
        return

    names = node.props[full_names_ident].to_strings()
    if len(names) != len(objs):
        _err(f"{full_names_ident} property in {node.path} "
             f"in {node.dt.filename} has {len(names)} strings, "
             f"expected {len(objs)} strings")

    for obj, name in zip(objs, names, strict=False):
        if obj is not None:
            obj.name = name
2944
2945
def _interrupt_parent(start_node: dtlib_Node) -> dtlib_Node:
    # Returns the node pointed at by the closest 'interrupt-parent' property,
    # searching upwards from 'start_node' through its parents. If no such
    # property is found on the way, returns the closest ancestor that looks
    # like an interrupt controller itself (has 'interrupt-controller' or
    # 'interrupt-map'). As of writing, this behavior isn't specified in
    # the DT spec., but seems to match what some .dts files expect.
    # Errors out via _err() when nothing suitable exists.

    node: Optional[dtlib_Node] = start_node

    while node:
        if "interrupt-parent" in node.props:
            iparent = node.props["interrupt-parent"].to_node()
            # NOTE(review): 'assert' is stripped under 'python -O', so a
            # malformed interrupt parent would then slip through — confirm
            # whether this should be a hard _err() instead.
            assert "interrupt-controller" in iparent.props or "interrupt-map" in iparent.props
            return iparent
        # No 'interrupt-parent' here; move up and check whether the parent
        # is itself an interrupt controller.
        node = node.parent
        if node is None:
            # Ran past the root without finding anything suitable
            _err(f"{start_node!r} no interrupt parent found")
        if ("interrupt-controller" in node.props) or ("interrupt-map" in node.props):
            return node

    # NOTE(review): unreachable — every path through the loop above either
    # returns or calls _err() (which raises) before 'node' can become falsy.
    _err(f"{start_node!r} has an 'interrupts' property, but neither the node "
         f"nor any of its parents has an 'interrupt-parent' property")
2966
2967
def _interrupts(node: dtlib_Node) -> list[tuple[dtlib_Node, bytes]]:
    # Returns one (<controller>, <data>) tuple per interrupt generated by
    # 'node'. <controller> is the final destination of the interrupt
    # (possibly after translation through an 'interrupt-map') and <data>
    # is the specifier associated with it, as a 'bytes' object.

    if "interrupts-extended" in node.props:
        # 'interrupts-extended' takes precedence over 'interrupts' when
        # both are present
        prop = node.props["interrupts-extended"]

        mapped: list[tuple[dtlib_Node, bytes]] = []
        for entry in _phandle_val_list(prop, "interrupt"):
            if entry is None:
                _err(f"node '{node.path}' interrupts-extended property "
                     "has an empty element")
            iparent, spec = entry
            mapped.append(_map_interrupt(node, iparent, spec))
        return mapped

    if "interrupts" not in node.props:
        return []

    # 'interrupts' behaves like 'interrupts-extended' with a single
    # interrupt parent shared by all entries
    iparent = _interrupt_parent(node)
    interrupt_cells = _interrupt_cells(iparent)

    return [_map_interrupt(node, iparent, raw)
            for raw in _slice(node, "interrupts", 4*interrupt_cells,
                              "4*<#interrupt-cells>")]
2999
3000
def _map_interrupt(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> tuple[dtlib_Node, bytes]:
    # Routes an interrupt headed from 'child' to 'parent' with specifier
    # 'child_spec' through any 'interrupt-map' properties. Returns a
    # (<controller>, <data>) tuple with the final destination after
    # mapping.

    if "interrupt-controller" in parent.props:
        # 'parent' handles the interrupt directly; no mapping needed
        return (parent, child_spec)

    def own_address_cells(node):
        # #address-cells on 'node' itself, which is what matters for
        # parents pointed at by 'interrupt-map' — so we can't use
        # _address_cells() here.
        count = _address_cells_self(node)
        if count is None:
            _err(f"missing #address-cells on {node!r} "
                 "(while handling interrupt-map)")
        return count

    def spec_len_fn(node):
        # Length in cells of a parent specifier in 'interrupt-map':
        # the node's own address cells plus its interrupt cells
        return own_address_cells(node) + _interrupt_cells(node)

    mapped_parent, raw_spec = _map(
        "interrupt", child, parent, _raw_unit_addr(child, parent) + child_spec,
        spec_len_fn, require_controller=True)

    # Strip the parent unit address part, if any
    return (mapped_parent, raw_spec[4*own_address_cells(mapped_parent):])
3035
3036
def _map_phandle_array_entry(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        basename: str
) -> tuple[dtlib_Node, bytes]:
    # Like _map_interrupt(), but for generic '<basename>-map' properties
    # (e.g. gpio-map): returns the final (<controller>, <data>)
    # destination of 'child_spec' after mapping.

    def spec_len_fn(node):
        # The parent specifier length comes from the '#<basename>-cells'
        # property on the node a *-map row points at
        cells_prop = f"#{basename}-cells"
        if cells_prop not in node.props:
            _err(f"expected '{cells_prop}' property on {node!r} "
                 f"(referenced by {child!r})")
        return node.props[cells_prop].to_num()

    # Unlike interrupts, a '<basename>-controller' property is not
    # required on the final node, for now
    return _map(basename, child, parent, child_spec, spec_len_fn,
                require_controller=False)
3057
3058
def _map(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        spec_len_fn: Callable[[dtlib_Node], int],
        require_controller: bool
) -> tuple[dtlib_Node, bytes]:
    # Common code for mapping through <prefix>-map properties, e.g.
    # interrupt-map and gpio-map. Returns a (<controller>, <data>) tuple
    # with the final destination after (recursive) mapping, or errors out
    # via _err() if no *-map row matches.
    #
    # prefix:
    #   The prefix, e.g. "interrupt" or "gpio"
    #
    # child:
    #   The "sender", e.g. the node with 'interrupts = <...>'
    #
    # parent:
    #   The "receiver", e.g. a node with 'interrupt-map = <...>' or
    #   'interrupt-controller' (no mapping)
    #
    # child_spec:
    #   The data associated with the interrupt/GPIO/etc., as a 'bytes' object,
    #   e.g. <1 2> for 'foo-gpios = <&gpio1 1 2>'.
    #
    # spec_len_fn:
    #   Function called on a parent specified in a *-map property to get the
    #   length of the parent specifier (data after phandle in *-map), in cells
    #
    # require_controller:
    #   If True, the final controller node after mapping is required to have
    #   a <prefix>-controller property.

    map_prop = parent.props.get(prefix + "-map")
    if not map_prop:
        if require_controller and prefix + "-controller" not in parent.props:
            _err(f"expected '{prefix}-controller' property on {parent!r} "
                 f"(referenced by {child!r})")

        # No mapping
        return (parent, child_spec)

    # Rows are matched against the child specifier with the
    # <prefix>-map-mask (if any) applied
    masked_child_spec = _mask(prefix, child, parent, child_spec)

    # Walk the raw *-map bytes row by row. Each row consists of:
    #   <child specifier> <parent phandle> <parent specifier>
    raw = map_prop.value
    while raw:
        if len(raw) < len(child_spec):
            _err(f"bad value for {map_prop!r}, missing/truncated child data")
        child_spec_entry = raw[:len(child_spec)]
        raw = raw[len(child_spec):]

        if len(raw) < 4:
            _err(f"bad value for {map_prop!r}, missing/truncated phandle")
        phandle = to_num(raw[:4])
        raw = raw[4:]

        # Parent specified in *-map
        map_parent = parent.dt.phandle2node.get(phandle)
        if not map_parent:
            _err(f"bad phandle ({phandle}) in {map_prop!r}")

        # The parent specifier length depends on the mapped-to node
        map_parent_spec_len = 4*spec_len_fn(map_parent)
        if len(raw) < map_parent_spec_len:
            _err(f"bad value for {map_prop!r}, missing/truncated parent data")
        parent_spec = raw[:map_parent_spec_len]
        raw = raw[map_parent_spec_len:]

        # Got one *-map row. Check if it matches the child data.
        if child_spec_entry == masked_child_spec:
            # Handle *-map-pass-thru
            parent_spec = _pass_thru(
                prefix, child, parent, child_spec, parent_spec)

            # Found match. Recursively map and return it.
            return _map(prefix, parent, map_parent, parent_spec, spec_len_fn,
                        require_controller)

    _err(f"child specifier for {child!r} ({child_spec!r}) "
         f"does not appear in {map_prop!r}")
3138
3139
def _mask(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> bytes:
    # Applies 'parent's <prefix>-map-mask property (e.g.
    # interrupt-map-mask), if any, to 'child_spec' and returns the masked
    # specifier. See _map() for the parameters.

    mask_prop = parent.props.get(prefix + "-map-mask")
    if not mask_prop:
        # No mask; use the specifier as-is
        return child_spec

    mask = mask_prop.value
    if len(mask) != len(child_spec):
        _err(f"{child!r}: expected '{prefix}-mask' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(mask)} bytes")

    return _and(child_spec, mask)
3160
3161
def _pass_thru(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        parent_spec: bytes
) -> bytes:
    # Applies 'parent's <prefix>-map-pass-thru property (e.g.
    # interrupt-map-pass-thru), if any, when mapping 'child_spec': bits
    # set in the pass-thru mask are taken from 'child_spec', the rest
    # from 'parent_spec' (the parent data of the matched <prefix>-map
    # row). See _map() for the other parameters.

    pass_thru_prop = parent.props.get(prefix + "-map-pass-thru")
    if not pass_thru_prop:
        # No pass-thru; use the parent data unmodified
        return parent_spec

    pass_thru = pass_thru_prop.value
    if len(pass_thru) != len(child_spec):
        _err(f"{child!r}: expected '{prefix}-map-pass-thru' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(pass_thru)} bytes")

    merged = _or(_and(child_spec, pass_thru),
                 _and(parent_spec, _not(pass_thru)))

    # The merged result can be longer than the parent specifier; only its
    # trailing len(parent_spec) bytes are kept.
    return merged[-len(parent_spec):]
3192
3193
def _raw_unit_addr(node: dtlib_Node, parent: dtlib_Node) -> bytes:
    # _map_interrupt() helper. Returns the unit address (derived from 'reg' and
    # #address-cells) as a raw 'bytes'
    #
    # node:
    #   The node whose 'reg' property supplies the unit address
    #
    # parent:
    #   The interrupt parent; its own #address-cells value decides how many
    #   address bytes are returned

    # #address-cells on the interrupt parent itself
    iparent: Optional[dtlib_Node] = parent
    iparent_addr_len = _address_cells_self(iparent)
    # #address-cells that applies to 'node' (taken from node's parent)
    parent_addr_len = _address_cells(node)

    if iparent_addr_len is None:
        iparent_addr_len =  2  # Default value per DT spec.

    # NOTE(review): _address_cells() never returns None (it substitutes the
    # default itself), so this branch looks unreachable — confirm
    if parent_addr_len is None:
        parent_addr_len =  2  # Default value per DT spec.

    if iparent_addr_len == 0:
        # Interrupt parent consumes no address cells: empty unit address
        return b''

    if 'reg' not in node.props:
        _err(f"{node!r} lacks 'reg' property "
             "(needed for 'interrupt-map' unit address lookup)")

    # Convert cell counts to byte counts (4 bytes per <u32> cell)
    iparent_addr_len *= 4
    parent_addr_len *= 4

    prop_len = len(node.props['reg'].value)
    if prop_len < iparent_addr_len or prop_len %4 != 0:
        _err(f"{node!r} has too short or incorrectly defined 'reg' property "
             "(while doing 'interrupt-map' unit address lookup)")

    address = b''
    if parent_addr_len > iparent_addr_len:
        # NOTE(review): the start index is negative here (iparent_addr_len -
        # parent_addr_len < 0), so it counts from the end of the whole 'reg'
        # value, not from the end of the address portion — presumably meant to
        # take the low-order iparent_addr_len bytes of the address; confirm
        # the behavior when 'reg' also contains size cells
        address = node.props['reg'].value[iparent_addr_len - parent_addr_len:parent_addr_len]
    else:
        # Parent uses at most as many address bytes as the interrupt parent:
        # take the leading iparent_addr_len bytes
        address = node.props['reg'].value[:iparent_addr_len]

    return address
3230
def _and(b1: bytes, b2: bytes) -> bytes:
    # Bitwise AND of the 'bytes' objects 'b1' and 'b2'. The shorter operand
    # is left-padded with 0xFF bytes (the AND identity) so the result has the
    # length of the longer one.

    width = max(len(b1), len(b2))
    padded1 = b1.rjust(width, b'\xff')
    padded2 = b2.rjust(width, b'\xff')
    return bytes(x & y for x, y in zip(padded1, padded2))
3239
3240
def _or(b1: bytes, b2: bytes) -> bytes:
    # Bitwise OR of the 'bytes' objects 'b1' and 'b2'. The shorter operand is
    # left-padded with zero bytes (the OR identity) so the result has the
    # length of the longer one.

    width = max(len(b1), len(b2))
    padded1 = b1.rjust(width, b'\x00')
    padded2 = b2.rjust(width, b'\x00')
    return bytes(x | y for x, y in zip(padded1, padded2))
3249
3250
def _not(b: bytes) -> bytes:
    # Bitwise complement of the 'bytes' object 'b'

    # XOR with 0xFF flips all eight bits of each byte while staying
    # non-negative (equivalent to ~x & 0xFF)
    return bytes(0xFF ^ x for x in b)
3256
3257
def _phandle_val_list(
        prop: dtlib_Property,
        n_cells_name: str
) -> list[Optional[tuple[dtlib_Node, bytes]]]:
    # Parses a value laid out as '<phandle> <value> <phandle> <value> ...'.
    # The size of each <value> comes from the #<name>-cells property on the
    # node the preceding <phandle> points at.
    #
    # prop:
    #   dtlib.Property whose value is parsed
    #
    # n_cells_name:
    #   The <name> part of the #<name>-cells property to look for on the nodes
    #   the phandles point to, e.g. "gpio" for #gpio-cells.
    #
    # The return value holds one (<node>, <value>) tuple per entry, where
    # <node> is the node the <phandle> points at. An entry whose phandle does
    # not refer to any node becomes None.

    full_n_cells_name = f"#{n_cells_name}-cells"

    entries: list[Optional[tuple[dtlib_Node, bytes]]] = []

    data = prop.value
    while data:
        if len(data) < 4:
            # Truncated phandle cell
            _err("bad value for " + repr(prop))
        target = prop.node.dt.phandle2node.get(to_num(data[:4]))
        data = data[4:]

        if not target:
            # Unspecified phandle-array element. This is valid; a 0
            # phandle value followed by no cells is an empty element.
            entries.append(None)
            continue

        if full_n_cells_name not in target.props:
            _err(f"{target!r} lacks {full_n_cells_name}")

        value_len = 4*target.props[full_n_cells_name].to_num()
        if len(data) < value_len:
            _err("missing data after phandle in " + repr(prop))

        entries.append((target, data[:value_len]))
        data = data[value_len:]

    return entries
3307
3308
def _address_cells_self(node: Optional[dtlib_Node]) -> Optional[int]:
    # Looks up #address-cells directly on 'node': the number of <u32> cells
    # used to encode the address in the 'reg' property. Returns None when
    # 'node' is None or has no #address-cells property.

    if node is None:
        return None
    prop = node.props.get("#address-cells")
    return prop.to_num() if prop is not None else None
3316
def _address_cells(node: dtlib_Node) -> int:
    # Returns the #address-cells setting that applies to 'node', i.e. the one
    # on its parent, giving the number of <u32> cells used to encode the
    # address in the 'reg' property
    if TYPE_CHECKING:
        assert node.parent

    cells = _address_cells_self(node.parent)
    # Per the DT spec, 2 is the default when the property is absent
    return 2 if cells is None else int(cells)
3327
3328
def _size_cells(node: dtlib_Node) -> int:
    # Returns the #size-cells setting that applies to 'node' (taken from its
    # parent), giving the number of <u32> cells used to encode the size in
    # the 'reg' property
    if TYPE_CHECKING:
        assert node.parent

    size_prop = node.parent.props.get("#size-cells")
    if size_prop is None:
        # Default value per DT spec
        return 1
    return size_prop.to_num()
3338
3339
def _interrupt_cells(node: dtlib_Node) -> int:
    # Returns the value of 'node's #interrupt-cells property, erroring out if
    # the property is missing

    prop = node.props.get("#interrupt-cells")
    if prop is None:
        _err(f"{node!r} lacks #interrupt-cells")
    return prop.to_num()
3347
3348
def _slice(node: dtlib_Node,
           prop_name: str,
           size: int,
           size_hint: str) -> list[bytes]:
    # Convenience wrapper around _slice_helper() that passes EDTError as the
    # final argument (presumably the exception type to raise on failure —
    # see _slice_helper for the exact semantics)
    return _slice_helper(node, prop_name, size, size_hint, EDTError)
3354
3355
def _check_dt(dt: DT) -> None:
    # Does devicetree sanity checks. dtlib is meant to be general and
    # anything-goes except for very special properties like phandle, but in
    # edtlib we can be pickier.

    # 'status' values permitted by the devicetree specification
    valid_status = {"okay", "disabled", "reserved", "fail", "fail-sss"}

    for node in dt.node_iter():
        status_prop = node.props.get("status")
        if status_prop is not None:
            try:
                status_val = status_prop.to_string()
            except DTError as e:
                # The error message gives the path
                _err(str(e))

            if status_val not in valid_status:
                _err(f"unknown 'status' value \"{status_val}\" in {node.path} "
                     f"in {node.dt.filename}, expected one of " +
                     ", ".join(valid_status) +
                     " (see the devicetree specification)")

        # 'ranges' must be empty or a list of numbers
        ranges_prop = node.props.get("ranges")
        if ranges_prop and ranges_prop.type not in (Type.EMPTY, Type.NUMS):
            _err(f"expected 'ranges = < ... >;' in {node.path} in "
                 f"{node.dt.filename}, not '{ranges_prop}' "
                  "(see the devicetree specification)")
3384
3385
def _err(msg) -> NoReturn:
    # Raises an EDTError with the given message; used throughout this module
    # to keep error-reporting call sites short
    raise EDTError(msg)
3388
# Logging object for this module
_LOG = logging.getLogger(__name__)

# Regular expression for non-alphanumeric-or-underscore characters.
# Compiled with re.ASCII, so only [a-zA-Z0-9_] count as word characters;
# everything else (including non-ASCII letters) matches.
_NOT_ALPHANUM_OR_UNDERSCORE = re.compile(r'\W', re.ASCII)
3394
3395
def str_as_token(val: str) -> str:
    """Return a canonical representation of a string as a C token.

    Every character outside [a-zA-Z0-9_] is replaced with an underscore
    (ASCII-only matching, same pattern as the module-level
    _NOT_ALPHANUM_OR_UNDERSCORE) and the result is returned."""

    return re.sub(r'\W', '_', val, flags=re.ASCII)
3403
3404
# Custom PyYAML binding loader class to avoid modifying yaml.Loader directly,
# which could interfere with YAML loading in clients
class _BindingLoader(Loader):
    pass


# Add legacy '!include foo.yaml' handling: the '!include' tag is resolved by
# the _binding_include() constructor
_BindingLoader.add_constructor("!include", _binding_include)
3413
3414#
3415# "Default" binding for properties which are defined by the spec.
3416#
3417# Zephyr: do not change the _DEFAULT_PROP_TYPES keys without
3418# updating the documentation for the DT_PROP() macro in
3419# include/devicetree.h.
3420#
3421
3422_DEFAULT_PROP_TYPES: dict[str, str] = {
3423    "compatible": "string-array",
3424    "status": "string",
3425    "ranges": "compound",  # NUMS or EMPTY
3426    "reg": "array",
3427    "reg-names": "string-array",
3428    "label": "string",
3429    "interrupts": "array",
3430    "interrupts-extended": "compound",
3431    "interrupt-names": "string-array",
3432    "interrupt-controller": "boolean",
3433}
3434
# Values accepted for the 'status' property by the default binding
_STATUS_ENUM: list[str] = [
    "ok", "okay", "disabled", "reserved", "fail", "fail-sss"
]
3436
def _raw_default_property_for(
        name: str
) -> dict[str, Union[str, bool, list[str]]]:
    # Builds the raw binding data for the spec-defined property 'name'.
    # Every default property is optional; 'status' additionally gets an
    # enum restricting its values to _STATUS_ENUM.
    raw_spec: dict[str, Union[str, bool, list[str]]] = {
        'type': _DEFAULT_PROP_TYPES[name],
        'required': False,
    }
    if name == 'status':
        raw_spec['enum'] = _STATUS_ENUM
    return raw_spec
3447
# Binding object backing the default property specs. Built with
# require_compatible=False and require_description=False since this binding
# is constructed internally rather than loaded from a binding file.
_DEFAULT_PROP_BINDING: Binding = Binding(
    None, {},
    raw={
        'properties': {
            name: _raw_default_property_for(name)
            for name in _DEFAULT_PROP_TYPES
        },
    },
    require_compatible=False,
    require_description=False,
)

# PropertySpec instances for the spec-defined default properties, keyed by
# property name
_DEFAULT_PROP_SPECS: dict[str, PropertySpec] = {
    name: PropertySpec(name, _DEFAULT_PROP_BINDING)
    for name in _DEFAULT_PROP_TYPES
}
3464