1# Copyright (c) 2019 Nordic Semiconductor ASA
2# Copyright (c) 2019 Linaro Limited
3# SPDX-License-Identifier: BSD-3-Clause
4
5# Tip: You can view just the documentation with 'pydoc3 devicetree.edtlib'
6
7"""
8Library for working with devicetrees at a higher level compared to dtlib. Like
9dtlib, this library presents a tree of devicetree nodes, but the nodes are
10augmented with information from bindings and include some interpretation of
11properties. Some of this interpretation is based on conventions established
12by the Linux kernel, so the Documentation/devicetree/bindings in the Linux
13source code is sometimes good reference material.
14
15Bindings are YAML files that describe devicetree nodes. Devicetree
16nodes are usually mapped to bindings via their 'compatible = "..."' property,
17but a binding can also come from a 'child-binding:' key in the binding for the
18parent devicetree node.
19
20Each devicetree node (dtlib.Node) gets a corresponding edtlib.Node instance,
21which has all the information related to the node.
22
23The top-level entry points for the library are the EDT and Binding classes.
24See their constructor docstrings for details. There is also a
25bindings_from_paths() helper function.
26"""
27
28# NOTE: tests/test_edtlib.py is the test suite for this library.
29
30# Implementation notes
31# --------------------
32#
33# A '_' prefix on an identifier in Python is a convention for marking it private.
34# Please do not access private things. Instead, think of what API you need, and
35# add it.
36#
37# This module is not meant to have any global state. It should be possible to
38# create several EDT objects with independent binding paths and flags. If you
39# need to add a configuration parameter or the like, store it in the EDT
40# instance, and initialize it e.g. with a constructor argument.
41#
42# This library is layered on top of dtlib, and is not meant to expose it to
43# clients. This keeps the header generation script simple.
44#
45# General biased advice:
46#
47# - Consider using @property for APIs that don't need parameters. It makes
48#   functions look like attributes, which is less awkward in clients, and makes
49#   it easy to switch back and forth between variables and functions.
50#
51# - Think about the data type of the thing you're exposing. Exposing something
52#   as e.g. a list or a dictionary is often nicer and more flexible than adding
53#   a function.
54#
55# - Avoid get_*() prefixes on functions. Name them after the thing they return
56#   instead. This often makes the code read more naturally in callers.
57#
58#   Also, consider using @property instead of get_*().
59#
60# - Don't expose dtlib stuff directly.
61#
62# - Add documentation for any new APIs you add.
63#
64#   The convention here is that docstrings (quoted strings) are used for public
65#   APIs, and "doc comments" for internal functions.
66#
67#   @properties are documented in the class docstring, as if they were
68#   variables. See the existing @properties for a template.
69
70from collections import defaultdict
71from copy import deepcopy
72from dataclasses import dataclass
73from typing import (Any, Callable, Iterable, NoReturn,
74                    Optional, TYPE_CHECKING, Union)
75import base64
76import hashlib
77import logging
78import os
79import re
80
81import yaml
82try:
83    # Use the C LibYAML parser if available, rather than the Python parser.
84    # This makes e.g. gen_defines.py more than twice as fast.
85    from yaml import CLoader as Loader
86except ImportError:
87    from yaml import Loader     # type: ignore
88
89from devicetree.dtlib import DT, DTError, to_num, to_nums, Type
90from devicetree.dtlib import Node as dtlib_Node
91from devicetree.dtlib import Property as dtlib_Property
92from devicetree.grutils import Graph
93from devicetree._private import _slice_helper
94
95def _compute_hash(path: str) -> str:
96    # Calculates the hash associated with the node's full path.
97    hasher = hashlib.sha256()
98    hasher.update(path.encode())
99    return base64.b64encode(hasher.digest(), altchars=b'__').decode().rstrip('=')
100
101#
102# Public classes
103#
104
105
class Binding:
    """
    Represents a parsed binding.

    These attributes are available on Binding objects:

    path:
      The absolute path to the file defining the binding.

    description:
      The free-form description of the binding, or None.

    compatible:
      The compatible string the binding matches.

      This may be None. For example, it's None when the Binding is inferred
      from node properties. It can also be None for Binding objects created
      using 'child-binding:' with no compatible.

    prop2specs:
      A dict mapping property names to PropertySpec objects
      describing those properties' values.

    specifier2cells:
      A dict that maps specifier space names (like "gpio",
      "clock", "pwm", etc.) to lists of cell names.

      For example, if the binding YAML contains 'pin' and 'flags' cell names
      for the 'gpio' specifier space, like this:

          gpio-cells:
          - pin
          - flags

      Then the Binding object will have a 'specifier2cells' attribute mapping
      "gpio" to ["pin", "flags"]. A missing key should be interpreted as zero
      cells.

    raw:
      The binding as an object parsed from YAML.

    bus:
      If nodes with this binding's 'compatible' describe a bus, a string
      describing the bus type (like "i2c") or a list describing supported
      protocols (like ["i3c", "i2c"]). None otherwise.

      Note that this is the raw value from the binding where it can be
      a string or a list. Use "buses" instead unless you need the raw
      value, where "buses" is always a list.

    buses:
      Derived property from 'bus' where 'buses' is a list of bus(es),
      for example, ["i2c"] or ["i3c", "i2c"]. Or an empty list if there is
      no 'bus:' in this binding.

    on_bus:
      If nodes with this binding's 'compatible' appear on a bus, a string
      describing the bus type (like "i2c"). None otherwise.

    child_binding:
      If this binding describes the properties of child nodes, then
      this is a Binding object for those children; it is None otherwise.
      A Binding object's 'child_binding.child_binding' is not None if there
      are multiple levels of 'child-binding' descriptions in the binding.
    """

    def __init__(self, path: Optional[str], fname2path: dict[str, str],
                 raw: Any = None, require_compatible: bool = True,
                 require_description: bool = True):
        """
        Binding constructor.

        path:
          Path to binding YAML file. May be None.

        fname2path:
          Map from include files to their absolute paths. Must
          not be None, but may be empty.

        raw:
          Optional raw content in the binding.
          This does not have to have any "include:" lines resolved.
          May be left out, in which case 'path' is opened and read.
          This can be used to resolve child bindings, for example.

        require_compatible:
          If True, it is an error if the binding does not contain a
          "compatible:" line. If False, a missing "compatible:" is
          not an error. Either way, "compatible:" must be a string
          if it is present in the binding.

        require_description:
          If True, it is an error if the binding does not contain a
          "description:" line. If False, a missing "description:" is
          not an error. Either way, "description:" must be a string
          if it is present in the binding.
        """
        self.path: Optional[str] = path
        self._fname2path: dict[str, str] = fname2path

        if raw is None:
            if path is None:
                _err("you must provide either a 'path' or a 'raw' argument")
            with open(path, encoding="utf-8") as f:
                raw = yaml.load(f, Loader=_BindingLoader)

        # Merge any included files into self.raw. This also pulls in
        # inherited child binding definitions, so it has to be done
        # before initializing those.
        self.raw: dict = self._merge_includes(raw, self.path)

        # Recursively initialize any child bindings. These don't
        # require a 'compatible' or 'description' to be well defined,
        # but they must be dicts.
        if "child-binding" in raw:
            if not isinstance(raw["child-binding"], dict):
                _err(f"malformed 'child-binding:' in {self.path}, "
                     "expected a binding (dictionary with keys/values)")
            self.child_binding: Optional['Binding'] = Binding(
                path, fname2path,
                raw=raw["child-binding"],
                require_compatible=False,
                require_description=False)
        else:
            self.child_binding = None

        # Make sure this is a well defined object.
        self._check(require_compatible, require_description)

        # Initialize look up tables.
        self.prop2specs: dict[str, 'PropertySpec'] = {}
        for prop_name in self.raw.get("properties", {}).keys():
            self.prop2specs[prop_name] = PropertySpec(prop_name, self)
        self.specifier2cells: dict[str, list[str]] = {}
        for key, val in self.raw.items():
            if key.endswith("-cells"):
                self.specifier2cells[key[:-len("-cells")]] = val

    def __repr__(self) -> str:
        if self.compatible:
            compat = f" for compatible '{self.compatible}'"
        else:
            compat = ""
        basename = os.path.basename(self.path or "")
        return f"<Binding {basename}" + compat + ">"

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('description')

    @property
    def compatible(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('compatible')

    @property
    def bus(self) -> Union[None, str, list[str]]:
        "See the class docstring"
        return self.raw.get('bus')

    @property
    def buses(self) -> list[str]:
        "See the class docstring"
        if self.raw.get('bus') is not None:
            return self._buses
        else:
            return []

    @property
    def on_bus(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('on-bus')

    def _merge_includes(self, raw: dict, binding_path: Optional[str]) -> dict:
        # Constructor helper. Merges included files in
        # 'raw["include"]' into 'raw' using 'self._include_paths' as a
        # source of include files, removing the "include" key while
        # doing so.
        #
        # This treats 'binding_path' as the binding file being built up
        # and uses it for error messages.

        if "include" not in raw:
            return raw

        include = raw.pop("include")

        # First, merge the included files together. If more than one included
        # file has a 'required:' for a particular property, OR the values
        # together, so that 'required: true' wins.

        merged: dict[str, Any] = {}

        if isinstance(include, str):
            # Simple scalar string case
            _merge_props(merged, self._load_raw(include), None, binding_path,
                         False)
        elif isinstance(include, list):
            # List of strings and maps. These types may be intermixed.
            for elem in include:
                if isinstance(elem, str):
                    _merge_props(merged, self._load_raw(elem), None,
                                 binding_path, False)
                elif isinstance(elem, dict):
                    name = elem.pop('name', None)
                    allowlist = elem.pop('property-allowlist', None)
                    blocklist = elem.pop('property-blocklist', None)
                    child_filter = elem.pop('child-binding', None)

                    if elem:
                        # We've popped out all the valid keys.
                        _err(f"'include:' in {binding_path} should not have "
                             f"these unexpected contents: {elem}")

                    _check_include_dict(name, allowlist, blocklist,
                                        child_filter, binding_path)

                    contents = self._load_raw(name)

                    _filter_properties(contents, allowlist, blocklist,
                                       child_filter, binding_path)
                    _merge_props(merged, contents, None, binding_path, False)
                else:
                    _err(f"all elements in 'include:' in {binding_path} "
                         "should be either strings or maps with a 'name' key "
                         "and optional 'property-allowlist' or "
                         f"'property-blocklist' keys, but got: {elem}")
        else:
            # Invalid item.
            _err(f"'include:' in {binding_path} "
                 f"should be a string or list, but has type {type(include)}")

        # Next, merge the merged included files into 'raw'. Error out if
        # 'raw' has 'required: false' while the merged included files have
        # 'required: true'.

        _merge_props(raw, merged, None, binding_path, check_required=True)

        return raw

    def _load_raw(self, fname: str) -> dict:
        # Returns the contents of the binding given by 'fname' after merging
        # any bindings it lists in 'include:' into it. 'fname' is just the
        # basename of the file, so we check that there aren't multiple
        # candidates.

        path = self._fname2path.get(fname)

        if not path:
            _err(f"'{fname}' not found")

        with open(path, encoding="utf-8") as f:
            contents = yaml.load(f, Loader=_BindingLoader)
            if not isinstance(contents, dict):
                _err(f'{path}: invalid contents, expected a mapping')

        return self._merge_includes(contents, path)

    def _check(self, require_compatible: bool, require_description: bool):
        # Does sanity checking on the binding.

        raw = self.raw

        if "compatible" in raw:
            compatible = raw["compatible"]
            if not isinstance(compatible, str):
                _err(f"malformed 'compatible: {compatible}' "
                     f"field in {self.path} - "
                     f"should be a string, not {type(compatible).__name__}")
        elif require_compatible:
            _err(f"missing 'compatible' in {self.path}")

        if "description" in raw:
            description = raw["description"]
            if not isinstance(description, str) or not description:
                _err(f"malformed or empty 'description' in {self.path}")
        elif require_description:
            _err(f"missing 'description' in {self.path}")

        # Allowed top-level keys. The 'include' key should have been
        # removed by _load_raw() already.
        ok_top = {"description", "compatible", "bus", "on-bus",
                  "properties", "child-binding"}

        # Descriptive errors for legacy bindings.
        legacy_errors = {
            "#cells": "expected *-cells syntax",
            "child": "use 'bus: <bus>' instead",
            "child-bus": "use 'bus: <bus>' instead",
            "parent": "use 'on-bus: <bus>' instead",
            "parent-bus": "use 'on-bus: <bus>' instead",
            "sub-node": "use 'child-binding' instead",
            "title": "use 'description' instead",
        }

        for key in raw:
            if key in legacy_errors:
                _err(f"legacy '{key}:' in {self.path}, {legacy_errors[key]}")

            if key not in ok_top and not key.endswith("-cells"):
                # Fix: the continuation string needs the 'f' prefix, or the
                # '{...}' placeholder is emitted literally in the message.
                _err(f"unknown key '{key}' in {self.path}, "
                     f"expected one of {', '.join(ok_top)}, or *-cells")

        if "bus" in raw:
            bus = raw["bus"]
            # 'bus:' must be a string or a list of strings. Fix: the inner
            # condition must use 'or'; with 'and', a list of non-strings
            # slipped through, and a non-str, non-list value (e.g. an int)
            # raised TypeError when iterated instead of reporting the error.
            if (not isinstance(bus, str) and
               (not isinstance(bus, list) or
                not all(isinstance(elem, str) for elem in bus))):
                _err(f"malformed 'bus:' value in {self.path}, "
                     "expected string or list of strings")

            if isinstance(bus, list):
                self._buses = bus
            else:
                # Convert bus into a list
                self._buses = [bus]

        if ("on-bus" in raw
            and not isinstance(raw["on-bus"], str)):
            _err(f"malformed 'on-bus:' value in {self.path}, "
                 "expected string")

        self._check_properties()

        for key, val in raw.items():
            if key.endswith("-cells"):
                if (not isinstance(val, list)
                    or not all(isinstance(elem, str) for elem in val)):
                    _err(f"malformed '{key}:' in {self.path}, "
                         "expected a list of strings")

    def _check_properties(self) -> None:
        # _check() helper for checking the contents of 'properties:'.

        raw = self.raw

        if "properties" not in raw:
            return

        ok_prop_keys = {"description", "type", "required",
                        "enum", "const", "default", "deprecated",
                        "specifier-space"}

        for prop_name, options in raw["properties"].items():
            for key in options:
                if key not in ok_prop_keys:
                    _err(f"unknown setting '{key}' in "
                         f"'properties: {prop_name}: ...' in {self.path}, "
                         f"expected one of {', '.join(ok_prop_keys)}")

            _check_prop_by_type(prop_name, options, self.path)

            for true_false_opt in ["required", "deprecated"]:
                if true_false_opt in options:
                    option = options[true_false_opt]
                    if not isinstance(option, bool):
                        _err(f"malformed '{true_false_opt}:' setting '{option}' "
                             f"for '{prop_name}' in 'properties' in {self.path}, "
                             "expected true/false")

            if options.get("deprecated") and options.get("required"):
                _err(f"'{prop_name}' in 'properties' in {self.path} should not "
                      "have both 'deprecated' and 'required' set")

            if ("description" in options
                and not isinstance(options["description"], str)):
                _err("missing, malformed, or empty 'description' for "
                     f"'{prop_name}' in 'properties' in {self.path}")

            if "enum" in options and not isinstance(options["enum"], list):
                _err(f"enum in {self.path} for property '{prop_name}' "
                     "is not a list")
479
480
class PropertySpec:
    """
    Describes one property from the 'properties:' section of a binding
    file: its type, description, and other metadata.

    These attributes are available on PropertySpec objects:

    binding:
      The Binding object which defined this property.

    name:
      The property's name.

    path:
      The file where this property was defined. In case a binding includes
      other bindings, this is the including binding file.
      Generally this means that this will be the binding file specifying
      the devicetree node of which this is a property.

    type:
      The type of the property as a string, as given in the binding.

    description:
      The free-form description of the property as a string, or None.

    enum:
      A list of values the property may take as given in the binding, or None.

    enum_tokenizable:
      True if enum is not None and all the values in it are tokenizable;
      False otherwise.

      A property must have string or string-array type and an "enum:" in its
      binding to be tokenizable. Additionally, the "enum:" values must be
      unique after converting all non-alphanumeric characters to underscores
      (so "foo bar" and "foo_bar" in the same "enum:" would not be
      tokenizable).

    enum_upper_tokenizable:
      Like 'enum_tokenizable', with the additional restriction that the
      "enum:" values must be unique after uppercasing and converting
      non-alphanumeric characters to underscores.

    const:
      The property's constant value as given in the binding, or None.

    default:
      The property's default value as given in the binding, or None.

    deprecated:
      True if the property is deprecated; False otherwise.

    required:
      True if the property is marked required; False otherwise.

    specifier_space:
      The specifier space for the property as given in the binding, or None.
    """

    def __init__(self, name: str, binding: Binding):
        self.binding: Binding = binding
        self.name: str = name
        self._raw: dict[str, Any] = binding.raw["properties"][name]

    def __repr__(self) -> str:
        return f"<PropertySpec {self.name} type '{self.type}'>"

    @property
    def path(self) -> Optional[str]:
        "See the class docstring"
        return self.binding.path

    @property
    def type(self) -> str:
        "See the class docstring"
        return self._raw["type"]

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("description")

    @property
    def enum(self) -> Optional[list]:
        "See the class docstring"
        return self._raw.get("enum")

    @property
    def enum_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily on first access, then cached.
        try:
            return self._enum_tokenizable
        except AttributeError:
            pass

        tokenizable = False
        if self.enum is not None and self.type in ('string', 'string-array'):
            # Cache the token forms in _as_tokens so that
            # enum_upper_tokenizable can reuse them.
            tokens = [re.sub(_NOT_ALPHANUM_OR_UNDERSCORE, '_', value)
                      for value in self.enum]
            self._as_tokens = tokens
            tokenizable = len(tokens) == len(set(tokens))

        self._enum_tokenizable = tokenizable
        return tokenizable

    @property
    def enum_upper_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily on first access, then cached.
        try:
            return self._enum_upper_tokenizable
        except AttributeError:
            pass

        if not self.enum_tokenizable:
            result = False
        else:
            upper = {token.upper() for token in self._as_tokens}
            result = len(self._as_tokens) == len(upper)

        self._enum_upper_tokenizable = result
        return result

    @property
    def const(self) -> Union[None, int, list[int], str, list[str]]:
        "See the class docstring"
        return self._raw.get("const")

    @property
    def default(self) -> Union[None, int, list[int], str, list[str]]:
        "See the class docstring"
        return self._raw.get("default")

    @property
    def required(self) -> bool:
        "See the class docstring"
        return self._raw.get("required", False)

    @property
    def deprecated(self) -> bool:
        "See the class docstring"
        return self._raw.get("deprecated", False)

    @property
    def specifier_space(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("specifier-space")
622
# Type alias covering every Python value a Property.val can hold,
# depending on the 'type:' given in the binding (int/string scalars and
# arrays, phandle targets as Node(s), phandle-array entries as
# ControllerAndData, uint8-array as bytes, or None).
PropertyValType = Union[int, str,
                        list[int], list[str],
                        'Node', list['Node'],
                        list[Optional['ControllerAndData']],
                        bytes, None]
628
629
@dataclass
class Property:
    """
    Represents a property on a Node, as set in its DT node and with
    additional info from the 'properties:' section of the binding.

    Only properties mentioned in 'properties:' get created. Properties of type
    'compound' currently do not get Property instances, as it's not clear
    what to generate for them.

    These attributes are available on Property objects. Several are
    just convenience accessors for attributes on the PropertySpec object
    accessible via the 'spec' attribute.

    These attributes are available on Property objects:

    spec:
      The PropertySpec object which specifies this property.

    val:
      The value of the property, with the format determined by spec.type,
      which comes from the 'type:' string in the binding.

        - For 'type: int/array/string/string-array', 'val' is what you'd expect
          (a Python integer or string, or a list of them)

        - For 'type: uint8-array', 'val' is a bytes object

        - For 'type: phandle' and 'type: path', 'val' is the pointed-to Node
          instance

        - For 'type: phandles', 'val' is a list of the pointed-to Node
          instances

        - For 'type: phandle-array', 'val' is a list of ControllerAndData
          instances. See the documentation for that class.

    node:
      The Node instance the property is on

    name:
      Convenience for spec.name.

    description:
      Convenience for spec.description with leading and trailing whitespace
      (including newlines) removed. May be None.

    type:
      Convenience for spec.type.

    val_as_tokens:
      The value of the property as a list of tokens, i.e. with non-alphanumeric
      characters replaced with underscores. This is only safe to access
      if 'spec.enum_tokenizable' returns True.

    enum_indices:
      A list of indices of 'val' in 'spec.enum' (which comes from the 'enum:'
      list in the binding), or None if spec.enum is None.
    """

    spec: PropertySpec
    val: PropertyValType
    node: 'Node'

    @property
    def name(self) -> str:
        "See the class docstring"
        return self.spec.name

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        # Strip surrounding whitespace; propagate None (and "") unchanged.
        desc = self.spec.description
        return desc.strip() if desc else None

    @property
    def type(self) -> str:
        "See the class docstring"
        return self.spec.type

    @property
    def val_as_tokens(self) -> list[str]:
        "See the class docstring"
        # Scalar values are treated as a one-element list.
        values = self.val if isinstance(self.val, list) else [self.val]
        tokens = []
        for value in values:
            assert isinstance(value, str)
            tokens.append(str_as_token(value))
        return tokens

    @property
    def enum_indices(self) -> Optional[list[int]]:
        "See the class docstring"
        enum = self.spec.enum
        if not enum:
            return None
        values = self.val if isinstance(self.val, list) else [self.val]
        return [enum.index(value) for value in values]
724
725
@dataclass
class Register:
    """
    Describes one entry in a node's 'reg' property.

    These attributes are available on Register objects:

    node:
      The Node instance this register is from

    name:
      The register's name from the corresponding 'reg-names' entry, or
      None if the node has no 'reg-names' property

    addr:
      The register's start address in the parent address space, with any
      'ranges' translations applied, or None if #address-cells is zero

    size:
      The register's size in bytes
    """

    node: 'Node'
    name: Optional[str]
    addr: Optional[int]
    size: Optional[int]
752
753
@dataclass
class Range:
    """
    Describes one translation entry from a node's 'ranges' property.

    These attributes are available on Range objects:

    node:
      The Node instance this range is from

    child_bus_cells:
      The number of cells used to describe a child bus address.

    child_bus_addr:
      A physical address within the child bus address space, or None if the
      child's #address-cells equals 0.

    parent_bus_cells:
      The number of cells used to describe a parent bus address.

    parent_bus_addr:
      A physical address within the parent bus address space, or None if the
      parent's #address-cells equals 0.

    length_cells:
      The number of cells used to describe the size of range in
      the child's address space.

    length:
      The size of the range in the child address space, or None if the
      child's #size-cells equals 0.
    """
    node: 'Node'
    child_bus_cells: int
    child_bus_addr: Optional[int]
    parent_bus_cells: int
    parent_bus_addr: Optional[int]
    length_cells: int
    length: Optional[int]
793
794
@dataclass
class ControllerAndData:
    """
    Describes one entry in an 'interrupts' or 'type: phandle-array'
    property value, e.g. <&ctrl-1 4 0> in

        cs-gpios = <&ctrl-1 4 0 &ctrl-2 3 4>;

    These attributes are available on ControllerAndData objects:

    node:
      The Node instance the property appears on

    controller:
      The Node instance for the controller (e.g. the controller the interrupt
      gets sent to for interrupts)

    data:
      A dictionary that maps names from the *-cells key in the binding for the
      controller to data values, e.g. {"pin": 4, "flags": 0} for the example
      above.

      'interrupts = <1 2>' might give {"irq": 1, "level": 2}.

    name:
      The name of the entry as given in
      'interrupt-names'/'gpio-names'/'pwm-names'/etc., or None if there is no
      *-names property

    basename:
      Basename for the controller when supporting named cells
    """
    node: 'Node'
    controller: 'Node'
    data: dict
    name: Optional[str]
    basename: Optional[str]
832
833
@dataclass
class PinCtrl:
    """
    Describes a pin control configuration for a set of pins on a device,
    e.g. pinctrl-0 or pinctrl-1.

    These attributes are available on PinCtrl objects:

    node:
      The Node instance the pinctrl-* property is on

    name:
      The name of the configuration, as given in pinctrl-names, or None if
      there is no pinctrl-names property

    name_as_token:
      Like 'name', but with non-alphanumeric characters converted to underscores.

    conf_nodes:
      A list of Node instances for the pin configuration nodes, e.g.
      the nodes pointed at by &state_1 and &state_2 in

          pinctrl-0 = <&state_1 &state_2>;
    """

    node: 'Node'
    name: Optional[str]
    conf_nodes: list['Node']

    @property
    def name_as_token(self):
        "See the class docstring"
        # No name means no token either.
        if self.name is None:
            return None
        return str_as_token(self.name)
867
868
869class Node:
870    """
871    Represents a devicetree node, augmented with information from bindings, and
872    with some interpretation of devicetree properties. There's a one-to-one
873    correspondence between devicetree nodes and Nodes.
874
875    These attributes are available on Node objects:
876
877    edt:
878      The EDT instance this node is from
879
880    name:
881      The name of the node
882
883    unit_addr:
884      An integer with the ...@<unit-address> portion of the node name,
885      translated through any 'ranges' properties on parent nodes, or None if
886      the node name has no unit-address portion. PCI devices use a different
887      node name format ...@<dev>,<func> or ...@<dev> (e.g. "pcie@1,0"), in
888      this case None is returned.
889
890    description:
891      The description string from the binding for the node, or None if the node
892      has no binding. Leading and trailing whitespace (including newlines) is
893      removed.
894
895    path:
896      The devicetree path of the node
897
898    label:
899      The text from the 'label' property on the node, or None if the node has
900      no 'label'
901
902    labels:
903      A list of all of the devicetree labels for the node, in the same order
904      as the labels appear, but with duplicates removed.
905
906      This corresponds to the actual devicetree source labels, unlike the
907      "label" attribute, which is the value of a devicetree property named
908      "label".
909
910    parent:
911      The Node instance for the devicetree parent of the Node, or None if the
912      node is the root node
913
914    children:
915      A dictionary with the Node instances for the devicetree children of the
916      node, indexed by name
917
918    dep_ordinal:
919      A non-negative integer value such that the value for a Node is
920      less than the value for all Nodes that depend on it.
921
922      The ordinal is defined for all Nodes, and is unique among nodes in its
923      EDT 'nodes' list.
924
925    hash:
926      A hashed value of the devicetree path of the node. This is defined for
927      all Nodes, and is checked for uniqueness among nodes in its EDT 'nodes'
928      list.
929
930    required_by:
931      A list with the nodes that directly depend on the node
932
933    depends_on:
934      A list with the nodes that the node directly depends on
935
936    status:
937      The node's status property value, as a string, or "okay" if the node
938      has no status property set. If the node's status property is "ok",
939      it is converted to "okay" for consistency.
940
941    read_only:
942      True if the node has a 'read-only' property, and False otherwise
943
944    matching_compat:
945      The 'compatible' string for the binding that matched the node, or None if
946      the node has no binding
947
948    binding_path:
949      The path to the binding file for the node, or None if the node has no
950      binding
951
952    compats:
953      A list of 'compatible' strings for the node, in the same order that
954      they're listed in the .dts file
955
    ranges:
      A list of Range objects extracted from the node's ranges property.
      The list is empty if the node does not have a ranges property.
959
960    regs:
961      A list of Register objects for the node's registers
962
963    props:
964      A dict that maps property names to Property objects.
965      Property objects are created for the devicetree properties
966      defined by the node's binding and that have a default value
967      or for which a value is set in the DTS.
968
969    aliases:
970      A list of aliases for the node. This is fetched from the /aliases node.
971
972    interrupts:
973      A list of ControllerAndData objects for the interrupts generated by the
974      node. The list is empty if the node does not generate interrupts.
975
976    pinctrls:
977      A list of PinCtrl objects for the pinctrl-<index> properties on the
978      node, sorted by index. The list is empty if the node does not have any
979      pinctrl-<index> properties.
980
981    buses:
982      If the node is a bus node (has a 'bus:' key in its binding), then this
983      attribute holds the list of supported bus types, e.g. ["i2c"], ["spi"]
984      or ["i3c", "i2c"] if multiple protocols are supported via the same bus.
985      If the node is not a bus node, then this attribute is an empty list.
986
987    on_buses:
988      The bus the node appears on, e.g. ["i2c"], ["spi"] or ["i3c", "i2c"] if
989      multiple protocols are supported via the same bus. The bus is determined
990      by searching upwards for a parent node whose binding has a 'bus:' key,
991      returning the value of the first 'bus:' key found. If none of the node's
992      parents has a 'bus:' key, this attribute is an empty list.
993
    bus_node:
      Like on_buses, but contains the Node for the bus controller, or None if
      the node is not on a bus.
997
998    flash_controller:
999      The flash controller for the node. Only meaningful for nodes representing
1000      flash partitions.
1001
1002    spi_cs_gpio:
1003      The device's SPI GPIO chip select as a ControllerAndData instance, if it
1004      exists, and None otherwise. See
1005      Documentation/devicetree/bindings/spi/spi-controller.yaml in the Linux kernel.
1006
1007    gpio_hogs:
1008      A list of ControllerAndData objects for the GPIOs hogged by the node. The
1009      list is empty if the node does not hog any GPIOs. Only relevant for GPIO hog
1010      nodes.
1011
1012    is_pci_device:
1013      True if the node is a PCI device.
1014    """
1015
    def __init__(
        self,
        dt_node: dtlib_Node,
        edt: "EDT",
        support_fixed_partitions_on_any_bus: bool = True,
    ):
        '''
        For internal use only; not meant to be used outside edtlib itself.

        Wraps 'dt_node' (a dtlib.Node) in an edtlib.Node belonging to 'edt'.
        Relies on parent nodes having been created before their children
        (see _bus_node()).
        '''

        compats = (
            dt_node.props["compatible"].to_strings()
            if "compatible" in dt_node.props
            else []
        )

        # Private, don't touch outside the class:
        self._node: dtlib_Node = dt_node
        self._binding: Optional[Binding] = None

        # Public, some of which are initialized properly later:
        self.edt: 'EDT' = edt
        # -1 is a placeholder; the real ordinal is assigned later by the EDT
        self.dep_ordinal: int = -1
        self.compats: list[str] = compats
        self.ranges: list[Range] = []
        self.regs: list[Register] = []
        self.props: dict[str, Property] = {}
        self.interrupts: list[ControllerAndData] = []
        self.pinctrls: list[PinCtrl] = []
        # Must come after self.compats is set: _bus_node() reads it
        self.bus_node = self._bus_node(support_fixed_partitions_on_any_bus)
        self.hash: str = _compute_hash(dt_node.path)

        self._init_binding()
        self._init_regs()
        self._init_ranges()
1051
1052    @property
1053    def name(self) -> str:
1054        "See the class docstring"
1055        return self._node.name
1056
1057    @property
1058    def filename(self) -> str:
1059        "See the class docstring"
1060        return self._node.filename
1061
1062    @property
1063    def lineno(self) -> int:
1064        "See the class docstring"
1065        return self._node.lineno
1066
1067    @property
1068    def unit_addr(self) -> Optional[int]:
1069        "See the class docstring"
1070
1071        # TODO: Return a plain string here later, like dtlib.Node.unit_addr?
1072
1073        # PCI devices use a different node name format (e.g. "pcie@1,0")
1074        if "@" not in self.name or self.is_pci_device:
1075            return None
1076
1077        try:
1078            addr = int(self.name.split("@", 1)[1], 16)
1079        except ValueError:
1080            _err(f"{self!r} has non-hex unit address")
1081
1082        return _translate(addr, self._node)
1083
1084    @property
1085    def description(self) -> Optional[str]:
1086        "See the class docstring."
1087        if self._binding:
1088            return self._binding.description
1089        return None
1090
1091    @property
1092    def path(self) ->  str:
1093        "See the class docstring"
1094        return self._node.path
1095
1096    @property
1097    def label(self) -> Optional[str]:
1098        "See the class docstring"
1099        if "label" in self._node.props:
1100            return self._node.props["label"].to_string()
1101        return None
1102
1103    @property
1104    def labels(self) -> list[str]:
1105        "See the class docstring"
1106        return self._node.labels
1107
1108    @property
1109    def parent(self) -> Optional['Node']:
1110        "See the class docstring"
1111        return self.edt._node2enode.get(self._node.parent) # type: ignore
1112
1113    @property
1114    def children(self) -> dict[str, 'Node']:
1115        "See the class docstring"
1116        # Could be initialized statically too to preserve identity, but not
1117        # sure if needed. Parent nodes being initialized before their children
1118        # would need to be kept in mind.
1119        return {name: self.edt._node2enode[node]
1120                for name, node in self._node.nodes.items()}
1121
1122    def child_index(self, node) -> int:
1123        """Get the index of *node* in self.children.
1124        Raises KeyError if the argument is not a child of this node.
1125        """
1126        if not hasattr(self, '_child2index'):
1127            # Defer initialization of this lookup table until this
1128            # method is callable to handle parents needing to be
1129            # initialized before their chidlren. By the time we
1130            # return from __init__, 'self.children' is callable.
1131            self._child2index: dict[str, int] = {}
1132            for index, child_path in enumerate(child.path for child in
1133                                               self.children.values()):
1134                self._child2index[child_path] = index
1135
1136        return self._child2index[node.path]
1137
1138    @property
1139    def required_by(self) -> list['Node']:
1140        "See the class docstring"
1141        return self.edt._graph.required_by(self)
1142
1143    @property
1144    def depends_on(self) -> list['Node']:
1145        "See the class docstring"
1146        return self.edt._graph.depends_on(self)
1147
1148    @property
1149    def status(self) -> str:
1150        "See the class docstring"
1151        status = self._node.props.get("status")
1152
1153        if status is None:
1154            as_string = "okay"
1155        else:
1156            as_string = status.to_string()
1157
1158        if as_string == "ok":
1159            as_string = "okay"
1160
1161        return as_string
1162
1163    @property
1164    def read_only(self) -> bool:
1165        "See the class docstring"
1166        return "read-only" in self._node.props
1167
1168    @property
1169    def matching_compat(self) -> Optional[str]:
1170        "See the class docstring"
1171        if self._binding:
1172            return self._binding.compatible
1173        return None
1174
1175    @property
1176    def binding_path(self) -> Optional[str]:
1177        "See the class docstring"
1178        if self._binding:
1179            return self._binding.path
1180        return None
1181
1182    @property
1183    def aliases(self) -> list[str]:
1184        "See the class docstring"
1185        return [alias for alias, node in self._node.dt.alias2node.items()
1186                if node is self._node]
1187
1188    @property
1189    def buses(self) -> list[str]:
1190        "See the class docstring"
1191        if self._binding:
1192            return self._binding.buses
1193        return []
1194
1195    @property
1196    def on_buses(self) -> list[str]:
1197        "See the class docstring"
1198        bus_node = self.bus_node
1199        return bus_node.buses if bus_node else []
1200
1201    @property
1202    def flash_controller(self) -> 'Node':
1203        "See the class docstring"
1204
1205        # The node path might be something like
1206        # /flash-controller@4001E000/flash@0/partitions/partition@fc000. We go
1207        # up two levels to get the flash and check its compat. The flash
1208        # controller might be the flash itself (for cases like NOR flashes).
1209        # For the case of 'soc-nv-flash', we assume the controller is the
1210        # parent of the flash node.
1211
1212        if not self.parent or not self.parent.parent:
1213            _err(f"flash partition {self!r} lacks parent or grandparent node")
1214
1215        controller = self.parent.parent
1216        if controller.matching_compat == "soc-nv-flash":
1217            if controller.parent is None:
1218                _err(f"flash controller '{controller.path}' cannot be the root node")
1219            return controller.parent
1220        return controller
1221
    @property
    def spi_cs_gpio(self) -> Optional[ControllerAndData]:
        "See the class docstring"

        # Only meaningful for a device on a SPI bus whose controller defines
        # 'cs-gpios' (see spi-controller.yaml in the Linux kernel bindings)
        if not ("spi" in self.on_buses
                and self.bus_node
                and "cs-gpios" in self.bus_node.props):
            return None

        if not self.regs:
            _err(f"{self!r} needs a 'reg' property, to look up the "
                 "chip select index for SPI")

        parent_cs_lst = self.bus_node.props["cs-gpios"].val
        if TYPE_CHECKING:
            assert isinstance(parent_cs_lst, list)

        # cs-gpios is indexed by the unit address
        cs_index = self.regs[0].addr
        if TYPE_CHECKING:
            assert isinstance(cs_index, int)

        # The device's chip select index must have a matching entry in the
        # controller's cs-gpios list
        if cs_index >= len(parent_cs_lst):
            _err(f"index from 'regs' in {self!r} ({cs_index}) "
                 "is >= number of cs-gpios in "
                 f"{self.bus_node!r} ({len(parent_cs_lst)})")

        ret = parent_cs_lst[cs_index]
        if TYPE_CHECKING:
            assert isinstance(ret, ControllerAndData)
        return ret
1253
1254    @property
1255    def gpio_hogs(self) -> list[ControllerAndData]:
1256        "See the class docstring"
1257
1258        if "gpio-hog" not in self.props:
1259            return []
1260
1261        if not self.parent or not "gpio-controller" in self.parent.props:
1262            _err(f"GPIO hog {self!r} lacks parent GPIO controller node")
1263
1264        if not "#gpio-cells" in self.parent._node.props:
1265            _err(f"GPIO hog {self!r} parent node lacks #gpio-cells")
1266
1267        n_cells = self.parent._node.props["#gpio-cells"].to_num()
1268        res = []
1269
1270        for item in _slice(self._node, "gpios", 4*n_cells,
1271                           f"4*(<#gpio-cells> (= {n_cells})"):
1272            controller = self.parent
1273            res.append(ControllerAndData(
1274                node=self, controller=controller,
1275                data=self._named_cells(controller, item, "gpio"),
1276                name=None, basename="gpio"))
1277
1278        return res
1279
1280    @property
1281    def has_child_binding(self) -> bool:
1282        """
1283        True if the node's binding contains a child-binding definition, False
1284        otherwise
1285        """
1286        return bool(self._binding and self._binding.child_binding)
1287
1288    @property
1289    def is_pci_device(self) -> bool:
1290        "See the class docstring"
1291        return 'pcie' in self.on_buses
1292
1293    def __repr__(self) -> str:
1294        if self.binding_path:
1295            binding = "binding " + self.binding_path
1296        else:
1297            binding = "no binding"
1298        return f"<Node {self.path} in '{self.edt.dts_path}', {binding}>"
1299
    def _init_binding(self) -> None:
        # Initializes Node._binding, which holds the Binding object for the
        # node, or None if the node has no binding.

        # This relies on the parent of the node having already been
        # initialized, which is guaranteed by going through the nodes in
        # node_iter() order.

        if self.path in self.edt._infer_binding_for_paths:
            # The binding is synthesized from the node's own properties
            self._binding_from_properties()
            return

        if self.compats:
            on_buses = self.on_buses

            for compat in self.compats:
                # When matching, respect the order of the 'compatible' entries,
                # and for each one first try to match against an explicitly
                # specified bus (if any) and then against any bus. This is so
                # that matching against bindings which do not specify a bus
                # works the same way in Zephyr as it does elsewhere.
                binding = None

                for bus in on_buses:
                    if (compat, bus) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, bus]
                        break

                if not binding:
                    if (compat, None) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, None]
                    else:
                        # No binding for this compatible; try the next one
                        continue

                self._binding = binding
                return
        else:
            # No 'compatible' property. See if the parent binding has
            # a compatible. This can come from one or more levels of
            # nesting with 'child-binding:'.

            binding_from_parent = self._binding_from_parent()
            if binding_from_parent:
                self._binding = binding_from_parent
                return

        # No binding found
        self._binding = None
1349
1350    def _binding_from_properties(self) -> None:
1351        # Sets up a Binding object synthesized from the properties in the node.
1352
1353        if self.compats:
1354            _err(f"compatible in node with inferred binding: {self.path}")
1355
1356        # Synthesize a 'raw' binding as if it had been parsed from YAML.
1357        raw: dict[str, Any] = {
1358            'description': 'Inferred binding from properties, via edtlib.',
1359            'properties': {},
1360        }
1361        for name, prop in self._node.props.items():
1362            pp: dict[str, str] = {}
1363            if prop.type == Type.EMPTY:
1364                pp["type"] = "boolean"
1365            elif prop.type == Type.BYTES:
1366                pp["type"] = "uint8-array"
1367            elif prop.type == Type.NUM:
1368                pp["type"] = "int"
1369            elif prop.type == Type.NUMS:
1370                pp["type"] = "array"
1371            elif prop.type == Type.STRING:
1372                pp["type"] = "string"
1373            elif prop.type == Type.STRINGS:
1374                pp["type"] = "string-array"
1375            elif prop.type == Type.PHANDLE:
1376                pp["type"] = "phandle"
1377            elif prop.type == Type.PHANDLES:
1378                pp["type"] = "phandles"
1379            elif prop.type == Type.PHANDLES_AND_NUMS:
1380                pp["type"] = "phandle-array"
1381            elif prop.type == Type.PATH:
1382                pp["type"] = "path"
1383            else:
1384                _err(f"cannot infer binding from property: {prop} "
1385                     f"with type {prop.type!r}")
1386            raw['properties'][name] = pp
1387
1388        # Set up Node state.
1389        self.compats = []
1390        self._binding = Binding(None, {}, raw=raw, require_compatible=False)
1391
1392    def _binding_from_parent(self) -> Optional[Binding]:
1393        # Returns the binding from 'child-binding:' in the parent node's
1394        # binding.
1395
1396        if not self.parent:
1397            return None
1398
1399        pbinding = self.parent._binding
1400        if not pbinding:
1401            return None
1402
1403        if pbinding.child_binding:
1404            return pbinding.child_binding
1405
1406        return None
1407
1408    def _bus_node(self, support_fixed_partitions_on_any_bus: bool = True
1409                  ) -> Optional['Node']:
1410        # Returns the value for self.bus_node. Relies on parent nodes being
1411        # initialized before their children.
1412
1413        if not self.parent:
1414            # This is the root node
1415            return None
1416
1417        # Treat 'fixed-partitions' as if they are not on any bus.  The reason is
1418        # that flash nodes might be on a SPI or controller or SoC bus.  Having
1419        # bus be None means we'll always match the binding for fixed-partitions
1420        # also this means want processing the fixed-partitions node we wouldn't
1421        # try to do anything bus specific with it.
1422        if support_fixed_partitions_on_any_bus and "fixed-partitions" in self.compats:
1423            return None
1424
1425        if self.parent.buses:
1426            # The parent node is a bus node
1427            return self.parent
1428
1429        # Same bus node as parent (possibly None)
1430        return self.parent.bus_node
1431
1432    def _init_crossrefs(
1433        self, default_prop_types: bool = False, err_on_deprecated: bool = False
1434    ) -> None:
1435        # Initializes all properties that require cross-references to other
1436        # nodes, like 'phandle' and 'phandles'. This is done after all nodes
1437        # have been initialized.
1438        self._init_props(
1439            default_prop_types=default_prop_types, err_on_deprecated=err_on_deprecated
1440        )
1441        self._init_interrupts()
1442        self._init_pinctrls()
1443
1444    def _init_props(self, default_prop_types: bool = False,
1445                    err_on_deprecated: bool = False) -> None:
1446        # Creates self.props. See the class docstring. Also checks that all
1447        # properties on the node are declared in its binding.
1448
1449        self.props = {}
1450
1451        if self._binding:
1452            prop2specs = self._binding.prop2specs
1453        else:
1454            prop2specs = None
1455
1456        # Initialize self.props
1457        if prop2specs:
1458            for prop_spec in prop2specs.values():
1459                self._init_prop(prop_spec, err_on_deprecated)
1460            self._check_undeclared_props()
1461        elif default_prop_types:
1462            for name in self._node.props:
1463                if name not in _DEFAULT_PROP_SPECS:
1464                    continue
1465                prop_spec = _DEFAULT_PROP_SPECS[name]
1466                val = self._prop_val(name, prop_spec, err_on_deprecated)
1467                self.props[name] = Property(prop_spec, val, self)
1468
1469    def _init_prop(self, prop_spec: PropertySpec,
1470                   err_on_deprecated: bool) -> None:
1471        # _init_props() helper for initializing a single property.
1472        # 'prop_spec' is a PropertySpec object from the node's binding.
1473
1474        name = prop_spec.name
1475        prop_type = prop_spec.type
1476        if not prop_type:
1477            _err(f"'{name}' in {self.binding_path} lacks 'type'")
1478
1479        val = self._prop_val(name, prop_spec, err_on_deprecated)
1480
1481        if val is None:
1482            # 'required: false' property that wasn't there, or a property type
1483            # for which we store no data.
1484            return
1485
1486        enum = prop_spec.enum
1487        for subval in val if isinstance(val, list) else [val]:
1488            if enum and subval not in enum:
1489                _err(f"value of property '{name}' on {self.path} in "
1490                    f"{self.edt.dts_path} ({subval!r}) is not in 'enum' list in "
1491                    f"{self.binding_path} ({enum!r})")
1492
1493        const = prop_spec.const
1494        if const is not None and val != const:
1495            _err(f"value of property '{name}' on {self.path} in "
1496                 f"{self.edt.dts_path} ({val!r}) "
1497                 "is different from the 'const' value specified in "
1498                 f"{self.binding_path} ({const!r})")
1499
1500        # Skip properties that start with '#', like '#size-cells', and mapping
1501        # properties like 'gpio-map'/'interrupt-map'
1502        if name[0] == "#" or name.endswith("-map"):
1503            return
1504
1505        self.props[name] = Property(prop_spec, val, self)
1506
    def _prop_val(
        self,
        name: str,
        prop_spec: PropertySpec,
        err_on_deprecated: bool,
    ) -> PropertyValType:
        # _init_prop() helper for getting the property's value
        #
        # name:
        #   Property name from binding
        #
        # prop_spec:
        #   PropertySpec from binding
        #
        # err_on_deprecated:
        #   If True, a deprecated property is an error instead of warning.

        node = self._node
        prop = node.props.get(name)
        binding_path = prop_spec.binding.path
        prop_type = prop_spec.type
        deprecated = prop_spec.deprecated
        required = prop_spec.required
        default = prop_spec.default
        specifier_space = prop_spec.specifier_space

        # Complain (or error out) about deprecated properties that are set
        if prop and deprecated:
            msg = (
                f"'{name}' is marked as deprecated in 'properties:' "
                f"in {binding_path} for node {node.path}."
            )
            if err_on_deprecated:
                _err(msg)
            else:
                _LOG.warning(msg)

        # Property not set in the DTS: enforce 'required:', then fall back on
        # the binding's 'default:' value, if any
        if not prop:
            # Missing required properties are only an error on enabled nodes
            if required and self.status == "okay":
                _err(
                    f"'{name}' is marked as required in 'properties:' in "
                    f"{binding_path}, but does not appear in {node!r}"
                )

            if default is not None:
                # YAML doesn't have a native format for byte arrays. We need to
                # convert those from an array like [0x12, 0x34, ...]. The
                # format has already been checked in
                # _check_prop_by_type().
                if prop_type == "uint8-array":
                    return bytes(default) # type: ignore
                return default

            # An absent boolean property means False; other absent properties
            # have no value
            return False if prop_type == "boolean" else None

        # The property is set: convert the dtlib value to the form the
        # binding's 'type:' asks for
        if prop_type == "boolean":
            if prop.type != Type.EMPTY:
                _err(f"'{name}' in {node!r} is defined with 'type: boolean' "
                     f"in {binding_path}, but is assigned a value ('{prop}') "
                     f"instead of being empty ('{name};')")
            return True

        if prop_type == "int":
            return prop.to_num()

        if prop_type == "array":
            return prop.to_nums()

        if prop_type == "uint8-array":
            return prop.to_bytes()

        if prop_type == "string":
            return prop.to_string()

        if prop_type == "string-array":
            return prop.to_strings()

        # phandle types are resolved to the corresponding edtlib Node(s)
        if prop_type == "phandle":
            return self.edt._node2enode[prop.to_node()]

        if prop_type == "phandles":
            return [self.edt._node2enode[node] for node in prop.to_nodes()]

        if prop_type == "phandle-array":
            # This type is a bit high-level for dtlib as it involves
            # information from bindings and *-names properties, so there's no
            # to_phandle_array() in dtlib. Do the type check ourselves.
            if prop.type not in (Type.PHANDLE, Type.PHANDLES, Type.PHANDLES_AND_NUMS):
                _err(f"expected property '{name}' in {node.path} in "
                     f"{node.dt.filename} to be assigned "
                     f"with '{name} = < &foo ... &bar 1 ... &baz 2 3 >' "
                     f"(a mix of phandles and numbers), not '{prop}'")

            return self._standard_phandle_val_list(prop, specifier_space)

        if prop_type == "path":
            return self.edt._node2enode[prop.to_path()]

        # prop_type == "compound". Checking that the 'type:'
        # value is valid is done in _check_prop_by_type().
        #
        # 'compound' is a dummy type for properties that don't fit any of the
        # patterns above, so that we can require all entries in 'properties:'
        # to have a 'type: ...'. No Property object is created for it.
        return None
1611
1612    def _check_undeclared_props(self) -> None:
1613        # Checks that all properties are declared in the binding
1614        wl = {"compatible", "status", "ranges", "phandle",
1615              "interrupt-parent", "interrupts-extended", "device_type"}
1616
1617        for prop_name in self._node.props:
1618            # Allow a few special properties to not be declared in the binding
1619            if (prop_name.endswith("-controller")
1620                or prop_name.startswith("#")
1621                or prop_name in wl):
1622                continue
1623
1624            if TYPE_CHECKING:
1625                assert self._binding
1626
1627            if prop_name not in self._binding.prop2specs:
1628                _err(f"'{prop_name}' appears in {self._node.path} in "
1629                     f"{self.edt.dts_path}, but is not declared in "
1630                     f"'properties:' in {self.binding_path}")
1631
    def _init_ranges(self) -> None:
        # Initializes self.ranges from the node's 'ranges' property
        node = self._node

        self.ranges = []

        if "ranges" not in node.props:
            return

        # The child address/size cell counts come from this node's own
        # '#address-cells'/'#size-cells'; the parent address cell count comes
        # from the parent node (via _address_cells())
        raw_child_address_cells = node.props.get("#address-cells")
        parent_address_cells = _address_cells(node)
        if raw_child_address_cells is None:
            child_address_cells = 2 # Default value per DT spec.
        else:
            child_address_cells = raw_child_address_cells.to_num()
        raw_child_size_cells = node.props.get("#size-cells")
        if raw_child_size_cells is None:
            child_size_cells = 1 # Default value per DT spec.
        else:
            child_size_cells = raw_child_size_cells.to_num()

        # Number of cells for one translation 3-tuple in 'ranges'
        entry_cells = child_address_cells + parent_address_cells + child_size_cells

        if entry_cells == 0:
            # Only an empty 'ranges;' (identity mapping) is valid then
            if len(node.props["ranges"].value) == 0:
                return
            else:
                _err(f"'ranges' should be empty in {self._node.path} since "
                     f"<#address-cells> = {child_address_cells}, "
                     f"<#address-cells for parent> = {parent_address_cells} and "
                     f"<#size-cells> = {child_size_cells}")

        # Each entry is 4 bytes (one cell) times 'entry_cells'; slice the raw
        # property value into per-entry chunks
        for raw_range in _slice(node, "ranges", 4*entry_cells,
                                f"4*(<#address-cells> (= {child_address_cells}) + "
                                "<#address-cells for parent> "
                                f"(= {parent_address_cells}) + "
                                f"<#size-cells> (= {child_size_cells}))"):

            # Zero cells means "no address/length" (None); otherwise decode
            # the big-endian cell group at the matching byte offset
            child_bus_cells = child_address_cells
            if child_address_cells == 0:
                child_bus_addr = None
            else:
                child_bus_addr = to_num(raw_range[:4*child_address_cells])
            parent_bus_cells = parent_address_cells
            if parent_address_cells == 0:
                parent_bus_addr = None
            else:
                parent_bus_addr = to_num(
                    raw_range[(4*child_address_cells):
                              (4*child_address_cells + 4*parent_address_cells)])
            length_cells = child_size_cells
            if child_size_cells == 0:
                length = None
            else:
                length = to_num(
                    raw_range[(4*child_address_cells + 4*parent_address_cells):])

            self.ranges.append(Range(self, child_bus_cells, child_bus_addr,
                                     parent_bus_cells, parent_bus_addr,
                                     length_cells, length))
1693
1694    def _init_regs(self) -> None:
1695        # Initializes self.regs
1696
1697        node = self._node
1698
1699        self.regs = []
1700
1701        if "reg" not in node.props:
1702            return
1703
1704        address_cells = _address_cells(node)
1705        size_cells = _size_cells(node)
1706
1707        for raw_reg in _slice(node, "reg", 4*(address_cells + size_cells),
1708                              f"4*(<#address-cells> (= {address_cells}) + "
1709                              f"<#size-cells> (= {size_cells}))"):
1710            if address_cells == 0:
1711                addr = None
1712            else:
1713                addr = _translate(to_num(raw_reg[:4*address_cells]), node)
1714            if size_cells == 0:
1715                size = None
1716            else:
1717                size = to_num(raw_reg[4*address_cells:])
1718            # Size zero is ok for PCI devices
1719            if size_cells != 0 and size == 0 and not self.is_pci_device:
1720                _err(f"zero-sized 'reg' in {self._node!r} seems meaningless "
1721                     "(maybe you want a size of one or #size-cells = 0 "
1722                     "instead)")
1723
1724            # We'll fix up the name when we're done.
1725            self.regs.append(Register(self, None, addr, size))
1726
1727        _add_names(node, "reg", self.regs)
1728
1729    def _init_pinctrls(self) -> None:
1730        # Initializes self.pinctrls from any pinctrl-<index> properties
1731
1732        node = self._node
1733
1734        # pinctrl-<index> properties
1735        pinctrl_props = [prop for name, prop in node.props.items()
1736                         if re.match("pinctrl-[0-9]+", name)]
1737        # Sort by index
1738        pinctrl_props.sort(key=lambda prop: prop.name)
1739
1740        # Check indices
1741        for i, prop in enumerate(pinctrl_props):
1742            if prop.name != "pinctrl-" + str(i):
1743                _err(f"missing 'pinctrl-{i}' property on {node!r} "
1744                     "- indices should be contiguous and start from zero")
1745
1746        self.pinctrls = []
1747        for prop in pinctrl_props:
1748            # We'll fix up the names below.
1749            self.pinctrls.append(PinCtrl(
1750                node=self,
1751                name=None,
1752                conf_nodes=[self.edt._node2enode[node]
1753                            for node in prop.to_nodes()]))
1754
1755        _add_names(node, "pinctrl", self.pinctrls)
1756
1757    def _init_interrupts(self) -> None:
1758        # Initializes self.interrupts
1759
1760        node = self._node
1761
1762        self.interrupts = []
1763
1764        for controller_node, data in _interrupts(node):
1765            # We'll fix up the names below.
1766            controller = self.edt._node2enode[controller_node]
1767            self.interrupts.append(ControllerAndData(
1768                node=self, controller=controller,
1769                data=self._named_cells(controller, data, "interrupt"),
1770                name=None, basename=None))
1771
1772        _add_names(node, "interrupt", self.interrupts)
1773
1774    def _standard_phandle_val_list(
1775            self,
1776            prop: dtlib_Property,
1777            specifier_space: Optional[str]
1778    ) -> list[Optional[ControllerAndData]]:
1779        # Parses a property like
1780        #
1781        #     <prop.name> = <phandle cell phandle cell ...>;
1782        #
1783        # where each phandle points to a controller node that has a
1784        #
1785        #     #<specifier_space>-cells = <size>;
1786        #
1787        # property that gives the number of cells in the value after the
1788        # controller's phandle in the property.
1789        #
1790        # E.g. with a property like
1791        #
1792        #     pwms = <&foo 1 2 &bar 3>;
1793        #
1794        # If 'specifier_space' is "pwm", then we should have this elsewhere
1795        # in the tree:
1796        #
1797        #     foo: ... {
1798        #             #pwm-cells = <2>;
1799        #     };
1800        #
1801        #     bar: ... {
1802        #             #pwm-cells = <1>;
1803        #     };
1804        #
1805        # These values can be given names using the <specifier_space>-names:
1806        # list in the binding for the phandle nodes.
1807        #
1808        # Also parses any
1809        #
1810        #     <specifier_space>-names = "...", "...", ...
1811        #
1812        # Returns a list of Optional[ControllerAndData] instances.
1813        #
1814        # An index is None if the underlying phandle-array element is
1815        # unspecified.
1816
1817        if not specifier_space:
1818            if prop.name.endswith("gpios"):
1819                # There's some slight special-casing for *-gpios properties in that
1820                # e.g. foo-gpios still maps to #gpio-cells rather than
1821                # #foo-gpio-cells
1822                specifier_space = "gpio"
1823            else:
1824                # Strip -s. We've already checked that property names end in -s
1825                # if there is no specifier space in _check_prop_by_type().
1826                specifier_space = prop.name[:-1]
1827
1828        res: list[Optional[ControllerAndData]] = []
1829
1830        for item in _phandle_val_list(prop, specifier_space):
1831            if item is None:
1832                res.append(None)
1833                continue
1834
1835            controller_node, data = item
1836            mapped_controller, mapped_data = (
1837                _map_phandle_array_entry(prop.node, controller_node,
1838                                         data, specifier_space))
1839
1840            controller = self.edt._node2enode[mapped_controller]
1841            # We'll fix up the names below.
1842            res.append(ControllerAndData(
1843                node=self, controller=controller,
1844                data=self._named_cells(controller, mapped_data,
1845                                       specifier_space),
1846                name=None, basename=specifier_space))
1847
1848        _add_names(self._node, specifier_space, res)
1849
1850        return res
1851
1852    def _named_cells(
1853            self,
1854            controller: 'Node',
1855            data: bytes,
1856            basename: str
1857    ) -> dict[str, int]:
1858        # Returns a dictionary that maps <basename>-cells names given in the
1859        # binding for 'controller' to cell values. 'data' is the raw data, as a
1860        # byte array.
1861
1862        if not controller._binding:
1863            _err(f"{basename} controller {controller._node!r} "
1864                 f"for {self._node!r} lacks binding")
1865
1866        if basename in controller._binding.specifier2cells:
1867            cell_names: list[str] = controller._binding.specifier2cells[basename]
1868        else:
1869            # Treat no *-cells in the binding the same as an empty *-cells, so
1870            # that bindings don't have to have e.g. an empty 'clock-cells:' for
1871            # '#clock-cells = <0>'.
1872            cell_names = []
1873
1874        data_list = to_nums(data)
1875        if len(data_list) != len(cell_names):
1876            _err(f"unexpected '{basename}-cells:' length in binding for "
1877                 f"{controller._node!r} - {len(cell_names)} "
1878                 f"instead of {len(data_list)}")
1879
1880        return dict(zip(cell_names, data_list))
1881
1882
class EDT:
    """
    Represents a devicetree augmented with information from bindings.

    These attributes are available on EDT objects:

    nodes:
      A list of Node objects for the nodes that appear in the devicetree

    compat2nodes:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a list of Nodes with that compatible.
      The collection is sorted so that enabled nodes appear first in the
      collection.

    compat2okay:
      Like compat2nodes, but just for nodes with status 'okay'.

    compat2notokay:
      Like compat2nodes, but just for nodes with status not 'okay'.

    compat2vendor:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a vendor name parsed from vendor_prefixes.

    compat2model:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a model name parsed from that compatible.

    label2node:
      A dict that maps a node label to the node with that label.

    dep_ord2node:
      A dict that maps an ordinal to the node with that dependency ordinal.

    chosen_nodes:
      A dict that maps the properties defined on the devicetree's /chosen
      node to their values. 'chosen' is indexed by property name (a string),
      and values are converted to Node objects. Note that properties of the
      /chosen node which can't be converted to a Node are not included in
      the value.

    dts_path:
      The .dts path passed to __init__()

    dts_source:
      The final DTS source code of the loaded devicetree after merging nodes
      and processing /delete-node/ and /delete-property/, as a string

    bindings_dirs:
      The bindings directory paths passed to __init__()

    scc_order:
      A list of lists of Nodes. All elements of each list
      depend on each other, and the Nodes in any list do not depend
      on any Node in a subsequent list. Each list defines a Strongly
      Connected Component (SCC) of the graph.

      For an acyclic graph each list will be a singleton. Cycles
      will be represented by lists with multiple nodes. Cycles are
      not expected to be present in devicetree graphs.

    The standard library's pickle module can be used to marshal and
    unmarshal EDT objects.
    """

    def __init__(self,
                 dts: Optional[str],
                 bindings_dirs: list[str],
                 warn_reg_unit_address_mismatch: bool = True,
                 default_prop_types: bool = True,
                 support_fixed_partitions_on_any_bus: bool = True,
                 infer_binding_for_paths: Optional[Iterable[str]] = None,
                 vendor_prefixes: Optional[dict[str, str]] = None,
                 werror: bool = False):
        """EDT constructor.

        dts:
          Path to devicetree .dts file. Passing None for this value
          is only for internal use; do not do that outside of edtlib.

        bindings_dirs:
          List of paths to directories containing bindings, in YAML format.
          These directories are recursively searched for .yaml files.

        warn_reg_unit_address_mismatch (default: True):
          If True, a warning is logged if a node has a 'reg' property where
          the address of the first entry does not match the unit address of the
          node

        default_prop_types (default: True):
          If True, default property types will be used when a node has no
          bindings.

        support_fixed_partitions_on_any_bus (default True):
          If True, set the Node.bus for 'fixed-partitions' compatible nodes
          to None.  This allows 'fixed-partitions' binding to match regardless
          of the bus the 'fixed-partition' is under.

        infer_binding_for_paths (default: None):
          An iterable of devicetree paths identifying nodes for which bindings
          should be inferred from the node content.  (Child nodes are not
          processed.)  Pass None if no nodes should support inferred bindings.

        vendor_prefixes (default: None):
          A dict mapping vendor prefixes in compatible properties to their
          descriptions. If given, compatibles in the form "manufacturer,device"
          for which "manufacturer" is neither a key in the dict nor a specially
          exempt set of grandfathered-in cases will cause warnings.

        werror (default: False):
          If True, some edtlib specific warnings become errors. This currently
          errors out if 'dts' has any deprecated properties set, or an unknown
          vendor prefix is used.
        """
        # All instance attributes should be initialized here.
        # This makes it easy to keep track of them, which makes
        # implementing __deepcopy__() easier.
        # If you change this, make sure to update __deepcopy__() too,
        # and update the tests for that method.

        # Public attributes (the rest are properties)
        self.nodes: list[Node] = []
        self.compat2nodes: dict[str, list[Node]] = defaultdict(list)
        self.compat2okay: dict[str, list[Node]] = defaultdict(list)
        self.compat2notokay: dict[str, list[Node]] = defaultdict(list)
        self.compat2vendor: dict[str, str] = defaultdict(str)
        self.compat2model: dict[str, str] = defaultdict(str)
        self.label2node: dict[str, Node] = {}
        self.dep_ord2node: dict[int, Node] = {}
        self.dts_path: str = dts # type: ignore
        self.bindings_dirs: list[str] = list(bindings_dirs)

        # Saved kwarg values for internal use
        self._warn_reg_unit_address_mismatch: bool = warn_reg_unit_address_mismatch
        self._default_prop_types: bool = default_prop_types
        self._fixed_partitions_no_bus: bool = support_fixed_partitions_on_any_bus
        self._infer_binding_for_paths: set[str] = set(infer_binding_for_paths or [])
        self._vendor_prefixes: dict[str, str] = vendor_prefixes or {}
        self._werror: bool = bool(werror)

        # Other internal state
        self._compat2binding: dict[tuple[str, Optional[str]], Binding] = {}
        self._graph: Graph = Graph()
        self._binding_paths: list[str] = _binding_paths(self.bindings_dirs)
        self._binding_fname2path: dict[str, str] = {
            os.path.basename(path): path
            for path in self._binding_paths
        }
        self._node2enode: dict[dtlib_Node, Node] = {}

        if dts is not None:
            try:
                self._dt = DT(dts)
            except DTError as e:
                raise EDTError(e) from e
            self._finish_init()

    def _finish_init(self) -> None:
        # This helper exists to make the __deepcopy__() implementation
        # easier to keep in sync with __init__().
        _check_dt(self._dt)

        self._init_compat2binding()
        self._init_nodes()
        self._init_graph()
        self._init_luts()

        self._check()

    def get_node(self, path: str) -> Node:
        """
        Returns the Node at the DT path or alias 'path'. Raises EDTError if the
        path or alias doesn't exist.
        """
        try:
            return self._node2enode[self._dt.get_node(path)]
        except DTError as e:
            _err(e)

    @property
    def chosen_nodes(self) -> dict[str, Node]:
        ret: dict[str, Node] = {}

        try:
            chosen = self._dt.get_node("/chosen")
        except DTError:
            return ret

        for name, prop in chosen.props.items():
            try:
                node = prop.to_path()
            except DTError:
                # DTS value is not phandle or string, or path doesn't exist
                continue

            ret[name] = self._node2enode[node]

        return ret

    def chosen_node(self, name: str) -> Optional[Node]:
        """
        Returns the Node pointed at by the property named 'name' in /chosen, or
        None if the property is missing
        """
        return self.chosen_nodes.get(name)

    @property
    def dts_source(self) -> str:
        return f"{self._dt}"

    def __repr__(self) -> str:
        return (f"<EDT for '{self.dts_path}', binding directories "
                f"'{self.bindings_dirs}'>")

    def __deepcopy__(self, memo) -> 'EDT':
        """
        Implements support for the standard library copy.deepcopy()
        function on EDT instances.
        """

        ret = EDT(
            None,
            self.bindings_dirs,
            warn_reg_unit_address_mismatch=self._warn_reg_unit_address_mismatch,
            default_prop_types=self._default_prop_types,
            support_fixed_partitions_on_any_bus=self._fixed_partitions_no_bus,
            infer_binding_for_paths=set(self._infer_binding_for_paths),
            vendor_prefixes=dict(self._vendor_prefixes),
            werror=self._werror
        )
        ret.dts_path = self.dts_path
        ret._dt = deepcopy(self._dt, memo)
        ret._finish_init()
        return ret

    @property
    def scc_order(self) -> list[list[Node]]:
        try:
            return self._graph.scc_order()
        except Exception as e:
            # Chain the cause for consistency with __init__(), so the
            # original traceback isn't lost.
            raise EDTError(e) from e

    def _process_properties_r(self, root_node: Node, props_node: Node) -> None:
        """
        Process props_node properties for dependencies, and add those as
        dependencies of root_node. Then walk through all the props_node
        children and do the same recursively, maintaining the same root_node.

        This ensures that on a node with child nodes, the parent node includes
        the dependencies of all the child nodes as well as its own.
        """
        # A Node depends on any Nodes present in 'phandle',
        # 'phandles', or 'phandle-array' property values.
        for prop in props_node.props.values():
            if prop.type == 'phandle':
                self._graph.add_edge(root_node, prop.val)
            elif prop.type == 'phandles':
                if TYPE_CHECKING:
                    assert isinstance(prop.val, list)
                for phandle_node in prop.val:
                    self._graph.add_edge(root_node, phandle_node)
            elif prop.type == 'phandle-array':
                if TYPE_CHECKING:
                    assert isinstance(prop.val, list)
                for cd in prop.val:
                    if cd is None:
                        continue
                    if TYPE_CHECKING:
                        assert isinstance(cd, ControllerAndData)
                    self._graph.add_edge(root_node, cd.controller)

        # A Node depends on whatever supports the interrupts it
        # generates.
        for intr in props_node.interrupts:
            self._graph.add_edge(root_node, intr.controller)

        # If the binding defines child bindings, link the child properties to
        # the root_node as well.
        if props_node.has_child_binding:
            for child in props_node.children.values():
                if "compatible" in child.props:
                    # Not a child node, normal node on a different binding.
                    continue
                self._process_properties_r(root_node, child)

    def _process_properties(self, node: Node) -> None:
        """
        Add node dependencies based on own as well as child node properties,
        start from the node itself.
        """
        self._process_properties_r(node, node)

    def _init_graph(self) -> None:
        # Constructs a graph of dependencies between Node instances,
        # which is usable for computing a partial order over the dependencies.
        # The algorithm supports detecting dependency loops.
        #
        # Actually computing the SCC order is lazily deferred to the
        # first time the scc_order property is read.

        for node in self.nodes:
            # Always insert root node
            if not node.parent:
                self._graph.add_node(node)

            # A Node always depends on its parent.
            for child in node.children.values():
                self._graph.add_edge(child, node)

            self._process_properties(node)

    def _init_compat2binding(self) -> None:
        # Creates self._compat2binding, a dictionary that maps
        # (<compatible>, <bus>) tuples (both strings) to Binding objects.
        #
        # The Binding objects are created from YAML files discovered
        # in self.bindings_dirs as needed.
        #
        # For example, self._compat2binding["company,dev", "can"]
        # contains the Binding for the 'company,dev' device, when it
        # appears on the CAN bus.
        #
        # For bindings that don't specify a bus, <bus> is None, so that e.g.
        # self._compat2binding["company,notonbus", None] is the Binding.
        #
        # Only bindings for 'compatible' strings that appear in the devicetree
        # are loaded.

        dt_compats = _dt_compats(self._dt)
        # Searches for any 'compatible' string mentioned in the devicetree
        # files, with a regex
        dt_compats_search = re.compile(
            "|".join(re.escape(compat) for compat in dt_compats)
        ).search

        for binding_path in self._binding_paths:
            with open(binding_path, encoding="utf-8") as f:
                contents = f.read()

            # As an optimization, skip parsing files that don't contain any of
            # the .dts 'compatible' strings, which should be reasonably safe
            if not dt_compats_search(contents):
                continue

            # Load the binding and check that it actually matches one of the
            # compatibles. Might get false positives above due to comments and
            # stuff.

            try:
                # Parsed PyYAML output (Python lists/dictionaries/strings/etc.,
                # representing the file)
                raw = yaml.load(contents, Loader=_BindingLoader)
            except yaml.YAMLError as e:
                _err(f"'{binding_path}' appears in binding directories "
                     f"but isn't valid YAML: {e}")

            # Convert the raw data to a Binding object, erroring out
            # if necessary.
            binding = self._binding(raw, binding_path, dt_compats)

            # Register the binding in self._compat2binding, along with
            # any child bindings that have their own compatibles.
            while binding is not None:
                if binding.compatible:
                    self._register_binding(binding)
                binding = binding.child_binding

    def _binding(self,
                 raw: Optional[dict],
                 binding_path: str,
                 dt_compats: set[str]) -> Optional[Binding]:
        # Convert a 'raw' binding from YAML to a Binding object and return it.
        #
        # Error out if the raw data looks like an invalid binding.
        #
        # Return None if the file doesn't contain a binding or the
        # binding's compatible isn't in dt_compats.

        # Get the 'compatible:' string.
        if raw is None or "compatible" not in raw:
            # Empty file, binding fragment, spurious file, etc.
            return None

        compatible = raw["compatible"]

        if compatible not in dt_compats:
            # Not a compatible we care about.
            return None

        # Initialize and return the Binding object.
        return Binding(binding_path, self._binding_fname2path, raw=raw)

    def _register_binding(self, binding: Binding) -> None:
        # Do not allow two different bindings to have the same
        # 'compatible:'/'on-bus:' combo
        if TYPE_CHECKING:
            assert binding.compatible
        old_binding = self._compat2binding.get((binding.compatible,
                                                binding.on_bus))
        if old_binding:
            msg = (f"both {old_binding.path} and {binding.path} have "
                   f"'compatible: {binding.compatible}'")
            if binding.on_bus is not None:
                msg += f" and 'on-bus: {binding.on_bus}'"
            _err(msg)

        # Register the binding.
        self._compat2binding[binding.compatible, binding.on_bus] = binding

    def _init_nodes(self) -> None:
        # Creates a list of edtlib.Node objects from the dtlib.Node objects, in
        # self.nodes

        hash2node: dict[str, Node] = {}

        for dt_node in self._dt.node_iter():
            # Warning: We depend on parent Nodes being created before their
            # children. This is guaranteed by node_iter().
            node = Node(dt_node, self, self._fixed_partitions_no_bus)

            if node.hash in hash2node:
                _err(f"hash collision between '{node.path}' and "
                     f"'{hash2node[node.hash].path}'")
            hash2node[node.hash] = node

            self.nodes.append(node)
            self._node2enode[dt_node] = node

        for node in self.nodes:
            # Initialize properties that may depend on other Node objects having
            # been created, because they (either always or sometimes) reference
            # other nodes. Must be called separately after all nodes have been
            # created.
            node._init_crossrefs(
                default_prop_types=self._default_prop_types,
                err_on_deprecated=self._werror,
            )

        if self._warn_reg_unit_address_mismatch:
            # This warning matches the simple_bus_reg warning in dtc
            for node in self.nodes:
                # Address mismatch is ok for PCI devices
                if (node.regs and node.regs[0].addr != node.unit_addr and
                        not node.is_pci_device):
                    _LOG.warning("unit address and first address in 'reg' "
                                 f"(0x{node.regs[0].addr:x}) don't match for "
                                 f"{node.path}")

    def _init_luts(self) -> None:
        # Initialize node lookup tables (LUTs).

        for node in self.nodes:
            for label in node.labels:
                self.label2node[label] = node

            for compat in node.compats:
                if node.status == "okay":
                    self.compat2okay[compat].append(node)
                else:
                    self.compat2notokay[compat].append(node)

                if compat in self.compat2vendor:
                    continue

                # The regular expression comes from dt-schema.
                compat_re = r'^[a-zA-Z][a-zA-Z0-9,+\-._]+$'
                if not re.match(compat_re, compat):
                    _err(f"node '{node.path}' compatible '{compat}' "
                         'must match this regular expression: '
                         f"'{compat_re}'")

                if ',' in compat and self._vendor_prefixes:
                    vendor, model = compat.split(',', 1)
                    if vendor in self._vendor_prefixes:
                        self.compat2vendor[compat] = self._vendor_prefixes[vendor]
                        self.compat2model[compat] = model

                    # As an exception, the root node can have whatever
                    # compatibles it wants. Other nodes get checked.
                    elif node.path != '/':
                        if self._werror:
                            handler_fn: Any = _err
                        else:
                            handler_fn = _LOG.warning
                        handler_fn(
                            f"node '{node.path}' compatible '{compat}' "
                            f"has unknown vendor prefix '{vendor}'")

        # 'okay' nodes are entered first so that they sort before the
        # not-okay ones in compat2nodes.
        for compat, nodes in self.compat2okay.items():
            self.compat2nodes[compat].extend(nodes)

        for compat, nodes in self.compat2notokay.items():
            self.compat2nodes[compat].extend(nodes)

        for nodeset in self.scc_order:
            node = nodeset[0]
            self.dep_ord2node[node.dep_ordinal] = node

    def _check(self) -> None:
        # Tree-wide checks and warnings.

        for binding in self._compat2binding.values():
            for spec in binding.prop2specs.values():
                if not spec.enum or spec.type != 'string':
                    continue

                if not spec.enum_tokenizable:
                    _LOG.warning(
                        f"compatible '{binding.compatible}' "
                        f"in binding '{binding.path}' has non-tokenizable enum "
                        f"for property '{spec.name}': " +
                        ', '.join(repr(x) for x in spec.enum))
                elif not spec.enum_upper_tokenizable:
                    _LOG.warning(
                        f"compatible '{binding.compatible}' "
                        f"in binding '{binding.path}' has enum for property "
                        f"'{spec.name}' that is only tokenizable "
                        'in lowercase: ' +
                        ', '.join(repr(x) for x in spec.enum))

        # Validate the contents of compatible properties.
        for node in self.nodes:
            if 'compatible' not in node.props:
                continue

            compatibles = node.props['compatible'].val

            # _check() runs after _init_compat2binding() has called
            # _dt_compats(), which already converted every compatible
            # property to a list of strings. So we know 'compatibles'
            # is a list, but add an assert for future-proofing.
            assert isinstance(compatibles, list)

            for compat in compatibles:
                # This is also just for future-proofing.
                assert isinstance(compat, str)
2422
def bindings_from_paths(yaml_paths: list[str],
                        ignore_errors: bool = False) -> list[Binding]:
    """
    Get a list of Binding objects from the yaml files 'yaml_paths'.

    If 'ignore_errors' is True, YAML files that cause an EDTError when
    loaded are ignored. (No other exception types are silenced.)
    """

    # Bindings reference each other by file name via 'include:'
    fname2path = {os.path.basename(p): p for p in yaml_paths}

    bindings = []
    for path in yaml_paths:
        try:
            bindings.append(Binding(path, fname2path))
        except EDTError:
            if not ignore_errors:
                raise

    return bindings
2443
2444
class EDTError(Exception):
    """Exception raised for devicetree- and binding-related errors."""
2447
2448#
2449# Public global functions
2450#
2451
2452
def load_vendor_prefixes_txt(vendor_prefixes: str) -> dict[str, str]:
    """Load a vendor-prefixes.txt file and return a dict
    representation mapping a vendor prefix to the vendor name.
    """
    result: dict[str, str] = {}
    with open(vendor_prefixes, 'r', encoding='utf-8') as f:
        for raw_line in f:
            stripped = raw_line.strip()

            # Skip blank lines and '#' comments.
            if not stripped or stripped.startswith('#'):
                continue

            # Every remaining line must look like:
            #
            # <vnd><TAB><vendor>
            fields = stripped.split('\t', 1)
            assert len(fields) == 2, stripped
            result[fields[0]] = fields[1]
    return result
2473
2474#
2475# Private global functions
2476#
2477
2478
def _dt_compats(dt: DT) -> set[str]:
    # Returns a set() with all 'compatible' strings in the devicetree
    # represented by dt (a dtlib.DT instance)

    compats: set[str] = set()
    for node in dt.node_iter():
        prop = node.props.get("compatible")
        if prop is not None:
            compats.update(prop.to_strings())
    return compats
2487
2488
2489def _binding_paths(bindings_dirs: list[str]) -> list[str]:
2490    # Returns a list with the paths to all bindings (.yaml files) in
2491    # 'bindings_dirs'
2492
2493    binding_paths = []
2494
2495    for bindings_dir in bindings_dirs:
2496        for root, _, filenames in os.walk(bindings_dir):
2497            for filename in filenames:
2498                if filename.endswith(".yaml") or filename.endswith(".yml"):
2499                    binding_paths.append(os.path.join(root, filename))
2500
2501    return binding_paths
2502
2503
def _binding_inc_error(msg):
    # Reports an error from the !include tag implementation by raising
    # the exception type PyYAML expects from constructors

    raise yaml.constructor.ConstructorError(None, None, "error: " + msg)
2508
2509
2510def _check_include_dict(name: Optional[str],
2511                        allowlist: Optional[list[str]],
2512                        blocklist: Optional[list[str]],
2513                        child_filter: Optional[dict],
2514                        binding_path: Optional[str]) -> None:
2515    # Check that an 'include:' named 'name' with property-allowlist
2516    # 'allowlist', property-blocklist 'blocklist', and
2517    # child-binding filter 'child_filter' has valid structure.
2518
2519    if name is None:
2520        _err(f"'include:' element in {binding_path} "
2521             "should have a 'name' key")
2522
2523    if allowlist is not None and blocklist is not None:
2524        _err(f"'include:' of file '{name}' in {binding_path} "
2525             "should not specify both 'property-allowlist:' "
2526             "and 'property-blocklist:'")
2527
2528    while child_filter is not None:
2529        child_copy = deepcopy(child_filter)
2530        child_allowlist: Optional[list[str]] = (
2531            child_copy.pop('property-allowlist', None))
2532        child_blocklist: Optional[list[str]] = (
2533            child_copy.pop('property-blocklist', None))
2534        next_child_filter: Optional[dict] = (
2535            child_copy.pop('child-binding', None))
2536
2537        if child_copy:
2538            # We've popped out all the valid keys.
2539            _err(f"'include:' of file '{name}' in {binding_path} "
2540                 "should not have these unexpected contents in a "
2541                 f"'child-binding': {child_copy}")
2542
2543        if child_allowlist is not None and child_blocklist is not None:
2544            _err(f"'include:' of file '{name}' in {binding_path} "
2545                 "should not specify both 'property-allowlist:' and "
2546                 "'property-blocklist:' in a 'child-binding:'")
2547
2548        child_filter = next_child_filter
2549
2550
def _filter_properties(raw: dict,
                       allowlist: Optional[list[str]],
                       blocklist: Optional[list[str]],
                       child_filter: Optional[dict],
                       binding_path: Optional[str]) -> None:
    # Destructively prunes 'raw["properties"]' and, recursively,
    # 'raw["child-binding"]' (when present) according to 'allowlist',
    # 'blocklist', and 'child_filter'.

    _filter_properties_helper(raw.get('properties'), allowlist, blocklist,
                              binding_path)

    # Descend the parallel 'child-binding'/'child_filter' chains in lockstep
    filt = child_filter
    child = raw.get('child-binding')
    while filt is not None and child is not None:
        _filter_properties_helper(child.get('properties'),
                                  filt.get('property-allowlist'),
                                  filt.get('property-blocklist'),
                                  binding_path)
        filt = filt.get('child-binding')
        child = child.get('child-binding')
2571
2572
def _filter_properties_helper(props: Optional[dict],
                              allowlist: Optional[list[str]],
                              blocklist: Optional[list[str]],
                              binding_path: Optional[str]) -> None:
    # _filter_properties() helper: deletes the entries of 'props' (a
    # 'properties:' dict) that are excluded by 'allowlist' or 'blocklist'.
    # No-op when there is nothing to filter.

    if props is None or (allowlist is None and blocklist is None):
        return

    _check_prop_filter('property-allowlist', allowlist, binding_path)
    _check_prop_filter('property-blocklist', blocklist, binding_path)

    if allowlist is not None:
        keep = set(allowlist)
        doomed = [name for name in props if name not in keep]
    else:
        if TYPE_CHECKING:
            assert blocklist
        banned = set(blocklist)
        doomed = [name for name in props if name in banned]

    for name in doomed:
        del props[name]
2594
2595
2596def _check_prop_filter(name: str, value: Optional[list[str]],
2597                       binding_path: Optional[str]) -> None:
2598    # Ensure an include: ... property-allowlist or property-blocklist
2599    # is a list.
2600
2601    if value is None:
2602        return
2603
2604    if not isinstance(value, list):
2605        _err(f"'{name}' value {value} in {binding_path} should be a list")
2606
2607
2608def _merge_props(to_dict: dict,
2609                 from_dict: dict,
2610                 parent: Optional[str],
2611                 binding_path: Optional[str],
2612                 check_required: bool = False):
2613    # Recursively merges 'from_dict' into 'to_dict', to implement 'include:'.
2614    #
2615    # If 'from_dict' and 'to_dict' contain a 'required:' key for the same
2616    # property, then the values are ORed together.
2617    #
2618    # If 'check_required' is True, then an error is raised if 'from_dict' has
2619    # 'required: true' while 'to_dict' has 'required: false'. This prevents
2620    # bindings from "downgrading" requirements from bindings they include,
2621    # which might help keep bindings well-organized.
2622    #
2623    # It's an error for most other keys to appear in both 'from_dict' and
2624    # 'to_dict'. When it's not an error, the value in 'to_dict' takes
2625    # precedence.
2626    #
2627    # 'parent' is the name of the parent key containing 'to_dict' and
2628    # 'from_dict', and 'binding_path' is the path to the top-level binding.
2629    # These are used to generate errors for sketchy property overwrites.
2630
2631    for prop in from_dict:
2632        if (isinstance(to_dict.get(prop), dict)
2633            and isinstance(from_dict[prop], dict)):
2634            _merge_props(to_dict[prop], from_dict[prop], prop, binding_path,
2635                         check_required)
2636        elif prop not in to_dict:
2637            to_dict[prop] = from_dict[prop]
2638        elif _bad_overwrite(to_dict, from_dict, prop, check_required):
2639            _err(f"{binding_path} (in '{parent}'): '{prop}' "
2640                 f"from included file overwritten ('{from_dict[prop]}' "
2641                 f"replaced with '{to_dict[prop]}')")
2642        elif prop == "required":
2643            # Need a separate check here, because this code runs before
2644            # Binding._check()
2645            if not (isinstance(from_dict["required"], bool) and
2646                    isinstance(to_dict["required"], bool)):
2647                _err(f"malformed 'required:' setting for '{parent}' in "
2648                     f"'properties' in {binding_path}, expected true/false")
2649
2650            # 'required: true' takes precedence
2651            to_dict["required"] = to_dict["required"] or from_dict["required"]
2652
2653
2654def _bad_overwrite(to_dict: dict, from_dict: dict, prop: str,
2655                   check_required: bool) -> bool:
2656    # _merge_props() helper. Returns True in cases where it's bad that
2657    # to_dict[prop] takes precedence over from_dict[prop].
2658
2659    if to_dict[prop] == from_dict[prop]:
2660        return False
2661
2662    # These are overridden deliberately
2663    if prop in {"title", "description", "compatible"}:
2664        return False
2665
2666    if prop == "required":
2667        if not check_required:
2668            return False
2669        return from_dict[prop] and not to_dict[prop]
2670
2671    return True
2672
2673
def _binding_include(loader, node):
    # Implements the legacy !include tag: '!include [foo, bar]' simply
    # produces the list [foo, bar].

    if isinstance(node, yaml.ScalarNode):
        # '!include foo.yaml' becomes a single-element list
        return [loader.construct_scalar(node)]
    if isinstance(node, yaml.SequenceNode):
        # '!include [foo.yaml, bar.yaml]'
        return loader.construct_sequence(node)
    _binding_inc_error("unrecognised node type in !include statement")
2687
2688
2689def _check_prop_by_type(prop_name: str,
2690                        options: dict,
2691                        binding_path: Optional[str]) -> None:
2692    # Binding._check_properties() helper. Checks 'type:', 'default:',
2693    # 'const:' and # 'specifier-space:' for the property named 'prop_name'
2694
2695    prop_type = options.get("type")
2696    default = options.get("default")
2697    const = options.get("const")
2698
2699    if prop_type is None:
2700        _err(f"missing 'type:' for '{prop_name}' in 'properties' in "
2701             f"{binding_path}")
2702
2703    ok_types = {"boolean", "int", "array", "uint8-array", "string",
2704                "string-array", "phandle", "phandles", "phandle-array",
2705                "path", "compound"}
2706
2707    if prop_type not in ok_types:
2708        _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2709             f"has unknown type '{prop_type}', expected one of " +
2710             ", ".join(ok_types))
2711
2712    if "specifier-space" in options and prop_type != "phandle-array":
2713        _err(f"'specifier-space' in 'properties: {prop_name}' "
2714             f"has type '{prop_type}', expected 'phandle-array'")
2715
2716    if prop_type == "phandle-array":
2717        if not prop_name.endswith("s") and not "specifier-space" in options:
2718            _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2719                 f"has type 'phandle-array' and its name does not end in 's', "
2720                 f"but no 'specifier-space' was provided.")
2721
2722    # If you change const_types, be sure to update the type annotation
2723    # for PropertySpec.const.
2724    const_types = {"int", "array", "uint8-array", "string", "string-array"}
2725    if const and prop_type not in const_types:
2726        _err(f"const in {binding_path} for property '{prop_name}' "
2727             f"has type '{prop_type}', expected one of " +
2728             ", ".join(const_types))
2729
2730    # Check default
2731
2732    if default is None:
2733        return
2734
2735    if prop_type in {"boolean", "compound", "phandle", "phandles",
2736                     "phandle-array", "path"}:
2737        _err("'default:' can't be combined with "
2738             f"'type: {prop_type}' for '{prop_name}' in "
2739             f"'properties:' in {binding_path}")
2740
2741    def ok_default() -> bool:
2742        # Returns True if 'default' is an okay default for the property's type.
2743        # If you change this, be sure to update the type annotation for
2744        # PropertySpec.default.
2745
2746        if (prop_type == "int" and isinstance(default, int)
2747            or prop_type == "string" and isinstance(default, str)):
2748            return True
2749
2750        # array, uint8-array, or string-array
2751
2752        if not isinstance(default, list):
2753            return False
2754
2755        if (prop_type == "array"
2756            and all(isinstance(val, int) for val in default)):
2757            return True
2758
2759        if (prop_type == "uint8-array"
2760            and all(isinstance(val, int)
2761                    and 0 <= val <= 255 for val in default)):
2762            return True
2763
2764        # string-array
2765        return all(isinstance(val, str) for val in default)
2766
2767    if not ok_default():
2768        _err(f"'default: {default}' is invalid for '{prop_name}' "
2769             f"in 'properties:' in {binding_path}, "
2770             f"which has type {prop_type}")
2771
2772
def _translate(addr: int, node: dtlib_Node) -> int:
    # Recursively translates 'addr' on 'node' to the address space(s) of its
    # parent(s), by looking at 'ranges' properties. Returns the translated
    # address.
    #
    # addr:
    #   Address within 'node''s address space, as an integer
    #
    # node:
    #   The dtlib.Node whose address is being translated

    if not node.parent or "ranges" not in node.parent.props:
        # No translation
        return addr

    if not node.parent.props["ranges"].value:
        # DT spec.: "If the property is defined with an <empty> value, it
        # specifies that the parent and child address space is identical, and
        # no address translation is required."
        #
        # Treat this the same as a 'range' that explicitly does a one-to-one
        # mapping, as opposed to there not being any translation.
        return _translate(addr, node.parent)

    # Gives the size of each component in a translation 3-tuple in 'ranges'
    child_address_cells = _address_cells(node)
    parent_address_cells = _address_cells(node.parent)
    child_size_cells = _size_cells(node)

    # Number of cells for one translation 3-tuple in 'ranges'
    entry_cells = child_address_cells + parent_address_cells + child_size_cells

    for raw_range in _slice(node.parent, "ranges", 4*entry_cells,
                            f"4*(<#address-cells> (= {child_address_cells}) + "
                            "<#address-cells for parent> "
                            f"(= {parent_address_cells}) + "
                            f"<#size-cells> (= {child_size_cells}))"):
        # Each entry is <child-addr> <parent-addr> <length>, 4 bytes per cell
        child_addr = to_num(raw_range[:4*child_address_cells])
        raw_range = raw_range[4*child_address_cells:]

        parent_addr = to_num(raw_range[:4*parent_address_cells])
        raw_range = raw_range[4*parent_address_cells:]

        child_len = to_num(raw_range)

        if child_addr <= addr < child_addr + child_len:
            # 'addr' is within range of a translation in 'ranges'. Recursively
            # translate it and return the result.
            return _translate(parent_addr + addr - child_addr, node.parent)

    # 'addr' is not within range of any translation in 'ranges'
    return addr
2819
2820
def _add_names(node: dtlib_Node, names_ident: str, objs: Any) -> None:
    # Sets the .name field on each element of 'objs' from the
    # '<names_ident>-names' property on 'node' (e.g. "reg" gives
    # "reg-names"), or to None if that property does not exist.
    #
    # node:
    #   Node which has a property that might need named elements.
    #
    # names_ident:
    #   The <foo> part of <foo>-names
    #
    # objs:
    #   list of objects whose .name field should be set

    prop_name = names_ident + "-names"

    if prop_name not in node.props:
        # No names available: clear every name
        for obj in objs:
            if obj is not None:
                obj.name = None
        return

    names = node.props[prop_name].to_strings()
    if len(names) != len(objs):
        _err(f"{prop_name} property in {node.path} "
             f"in {node.dt.filename} has {len(names)} strings, "
             f"expected {len(objs)} strings")

    for obj, name in zip(objs, names):
        if obj is not None:
            obj.name = name
2850
2851
def _interrupt_parent(start_node: dtlib_Node) -> dtlib_Node:
    # Returns the node pointed at by the closest 'interrupt-parent' property,
    # checking 'start_node' itself and then each of its parents. As of
    # writing, this behavior isn't specified in the DT spec., but it matches
    # what some .dts files seem to expect.

    search: Optional[dtlib_Node] = start_node
    while search:
        prop = search.props.get("interrupt-parent")
        if prop is not None:
            return prop.to_node()
        search = search.parent

    _err(f"{start_node!r} has an 'interrupts' property, but neither the node "
         f"nor any of its parents has an 'interrupt-parent' property")
2866
2867
def _interrupts(node: dtlib_Node) -> list[tuple[dtlib_Node, bytes]]:
    # Returns one (<controller>, <data>) tuple per interrupt generated by
    # 'node'. <controller> is the node the interrupt ends up at (possibly
    # after mapping through an 'interrupt-map'), and <data> the raw data
    # associated with the interrupt, as a 'bytes' object.

    # 'interrupts-extended' wins over 'interrupts' when both are present
    if "interrupts-extended" in node.props:
        prop = node.props["interrupts-extended"]

        mapped: list[tuple[dtlib_Node, bytes]] = []
        for entry in _phandle_val_list(prop, "interrupt"):
            if entry is None:
                _err(f"node '{node.path}' interrupts-extended property "
                     "has an empty element")
            iparent, spec = entry
            mapped.append(_map_interrupt(node, iparent, spec))
        return mapped

    if "interrupts" in node.props:
        # 'interrupts' behaves like 'interrupts-extended' with a single
        # interrupt parent shared by all entries

        iparent = _interrupt_parent(node)
        interrupt_cells = _interrupt_cells(iparent)

        return [_map_interrupt(node, iparent, raw)
                for raw in _slice(node, "interrupts", 4*interrupt_cells,
                                  "4*<#interrupt-cells>")]

    return []
2899
2900
def _map_interrupt(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> tuple[dtlib_Node, bytes]:
    # Follows any 'interrupt-map' properties to translate an interrupt headed
    # from 'child' to 'parent' with data 'child_spec'. Returns the final
    # (<controller>, <data>) destination after mapping.

    if "interrupt-controller" in parent.props:
        # 'parent' is the controller itself; nothing to map
        return (parent, child_spec)

    def own_address_cells(node):
        # #address-cells of 'node' itself, for parents pointed at by
        # 'interrupt-map'. We can't use _address_cells(), because that looks
        # at the parent of 'node' rather than 'node' itself.

        address_cells = node.props.get("#address-cells")
        if not address_cells:
            _err(f"missing #address-cells on {node!r} "
                 "(while handling interrupt-map)")
        return address_cells.to_num()

    def spec_len_fn(node):
        # Length in cells of a parent specifier in 'interrupt-map':
        # unit address cells followed by interrupt cells
        return own_address_cells(node) + _interrupt_cells(node)

    parent, raw_spec = _map(
        "interrupt", child, parent, _raw_unit_addr(child) + child_spec,
        spec_len_fn, require_controller=True)

    # Drop the parent unit address portion, if any
    return (parent, raw_spec[4*own_address_cells(parent):])
2935
2936
def _map_phandle_array_entry(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        basename: str
) -> tuple[dtlib_Node, bytes]:
    # Follows any '<basename>-map' (e.g. gpio-map) properties and returns a
    # (<controller>, <data>) tuple with the final destination. See
    # _map_interrupt().

    def spec_len_fn(node):
        # Parent specifier length comes from the node's '#<basename>-cells'
        cells_name = f"#{basename}-cells"
        if cells_name not in node.props:
            _err(f"expected '{cells_name}' property on {node!r} "
                 f"(referenced by {child!r})")
        return node.props[cells_name].to_num()

    # Do not require <prefix>-controller for anything but interrupts for now
    return _map(basename, child, parent, child_spec, spec_len_fn,
                require_controller=False)
2957
2958
def _map(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        spec_len_fn: Callable[[dtlib_Node], int],
        require_controller: bool
) -> tuple[dtlib_Node, bytes]:
    # Common code for mapping through <prefix>-map properties, e.g.
    # interrupt-map and gpio-map.
    #
    # prefix:
    #   The prefix, e.g. "interrupt" or "gpio"
    #
    # child:
    #   The "sender", e.g. the node with 'interrupts = <...>'
    #
    # parent:
    #   The "receiver", e.g. a node with 'interrupt-map = <...>' or
    #   'interrupt-controller' (no mapping)
    #
    # child_spec:
    #   The data associated with the interrupt/GPIO/etc., as a 'bytes' object,
    #   e.g. <1 2> for 'foo-gpios = <&gpio1 1 2>'.
    #
    # spec_len_fn:
    #   Function called on a parent specified in a *-map property to get the
    #   length of the parent specifier (data after phandle in *-map), in cells
    #
    # require_controller:
    #   If True, the final controller node after mapping is required to have
    #   a <prefix>-controller property.
    #
    # Returns a (<controller>, <data>) tuple with the final destination after
    # mapping. Errors out (via _err) if no *-map entry matches.

    map_prop = parent.props.get(prefix + "-map")
    if not map_prop:
        if require_controller and prefix + "-controller" not in parent.props:
            _err(f"expected '{prefix}-controller' property on {parent!r} "
                 f"(referenced by {child!r})")

        # No mapping
        return (parent, child_spec)

    # Apply the optional <prefix>-map-mask before comparing against entries
    masked_child_spec = _mask(prefix, child, parent, child_spec)

    # Walk the raw *-map bytes, consuming one
    # <child data> <phandle> <parent data> row per iteration
    raw = map_prop.value
    while raw:
        if len(raw) < len(child_spec):
            _err(f"bad value for {map_prop!r}, missing/truncated child data")
        child_spec_entry = raw[:len(child_spec)]
        raw = raw[len(child_spec):]

        if len(raw) < 4:
            _err(f"bad value for {map_prop!r}, missing/truncated phandle")
        phandle = to_num(raw[:4])
        raw = raw[4:]

        # Parent specified in *-map
        map_parent = parent.dt.phandle2node.get(phandle)
        if not map_parent:
            _err(f"bad phandle ({phandle}) in {map_prop!r}")

        # The parent data length (in bytes) depends on the map parent itself
        map_parent_spec_len = 4*spec_len_fn(map_parent)
        if len(raw) < map_parent_spec_len:
            _err(f"bad value for {map_prop!r}, missing/truncated parent data")
        parent_spec = raw[:map_parent_spec_len]
        raw = raw[map_parent_spec_len:]

        # Got one *-map row. Check if it matches the child data.
        if child_spec_entry == masked_child_spec:
            # Handle *-map-pass-thru
            parent_spec = _pass_thru(
                prefix, child, parent, child_spec, parent_spec)

            # Found match. Recursively map and return it.
            return _map(prefix, parent, map_parent, parent_spec, spec_len_fn,
                        require_controller)

    _err(f"child specifier for {child!r} ({child_spec!r}) "
         f"does not appear in {map_prop!r}")
3038
3039
def _mask(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-mask properties, e.g.
    # interrupt-map-mask. Returns 'child_spec' ANDed with the mask from
    # 'parent', or unchanged if 'parent' has no mask property. See _map()
    # for the parameters.

    mask_prop = parent.props.get(prefix + "-map-mask")
    if not mask_prop:
        # No mask
        return child_spec

    mask = mask_prop.value
    if len(mask) != len(child_spec):
        # Fix: the property looked up is '<prefix>-map-mask', so name it
        # correctly in the diagnostic (was "'<prefix>-mask'")
        _err(f"{child!r}: expected '{prefix}-map-mask' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(mask)} bytes")

    return _and(child_spec, mask)
3060
3061
def _pass_thru(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        parent_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-pass-thru properties, e.g.
    # interrupt-map-pass-thru.
    #
    # parent_spec:
    #   The parent data from the matched entry in the <prefix>-map property
    #
    # See _map() for the other parameters.

    pass_thru_prop = parent.props.get(prefix + "-map-pass-thru")
    if not pass_thru_prop:
        # No pass-thru: use the matched parent data as-is
        return parent_spec

    pass_thru = pass_thru_prop.value
    if len(pass_thru) != len(child_spec):
        _err(f"{child!r}: expected '{prefix}-map-pass-thru' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(pass_thru)} bytes")

    # Bits set in pass-thru come from the child data; the rest come from
    # the parent data
    merged = _or(_and(child_spec, pass_thru),
                 _and(parent_spec, _not(pass_thru)))

    # Truncate to the length of the parent spec
    return merged[-len(parent_spec):]
3092
3093
def _raw_unit_addr(node: dtlib_Node) -> bytes:
    # _map_interrupt() helper. Returns 'node''s unit address (the
    # #address-cells-sized prefix of its 'reg' value) as raw 'bytes'

    reg = node.props.get('reg')
    if reg is None:
        _err(f"{node!r} lacks 'reg' property "
             "(needed for 'interrupt-map' unit address lookup)")

    addr_len = 4*_address_cells(node)

    if len(reg.value) < addr_len:
        _err(f"{node!r} has too short 'reg' property "
             "(while doing 'interrupt-map' unit address lookup)")

    return reg.value[:addr_len]
3109
3110
3111def _and(b1: bytes, b2: bytes) -> bytes:
3112    # Returns the bitwise AND of the two 'bytes' objects b1 and b2. Pads
3113    # with ones on the left if the lengths are not equal.
3114
3115    # Pad on the left, to equal length
3116    maxlen = max(len(b1), len(b2))
3117    return bytes(x & y for x, y in zip(b1.rjust(maxlen, b'\xff'),
3118                                       b2.rjust(maxlen, b'\xff')))
3119
3120
3121def _or(b1: bytes, b2: bytes) -> bytes:
3122    # Returns the bitwise OR of the two 'bytes' objects b1 and b2. Pads with
3123    # zeros on the left if the lengths are not equal.
3124
3125    # Pad on the left, to equal length
3126    maxlen = max(len(b1), len(b2))
3127    return bytes(x | y for x, y in zip(b1.rjust(maxlen, b'\x00'),
3128                                       b2.rjust(maxlen, b'\x00')))
3129
3130
3131def _not(b: bytes) -> bytes:
3132    # Returns the bitwise not of the 'bytes' object 'b'
3133
3134    # ANDing with 0xFF avoids negative numbers
3135    return bytes(~x & 0xFF for x in b)
3136
3137
def _phandle_val_list(
        prop: dtlib_Property,
        n_cells_name: str
) -> list[Optional[tuple[dtlib_Node, bytes]]]:
    # Parses a '<phandle> <value> <phandle> <value> ...' value, where the
    # number of cells in each <value> comes from the '#<n_cells_name>-cells'
    # property on the node the preceding <phandle> points to.
    #
    # prop:
    #   dtlib.Property with value to parse
    #
    # n_cells_name:
    #   The <name> part of the #<name>-cells property to look for on the nodes
    #   the phandles point to, e.g. "gpio" for #gpio-cells.
    #
    # Returns a list of (<node>, <value>) tuples, where <node> is the node
    # pointed at by <phandle>. An element is None when its <phandle> does
    # not refer to a node.

    cells_prop_name = f"#{n_cells_name}-cells"

    entries: list[Optional[tuple[dtlib_Node, bytes]]] = []

    raw = prop.value
    while raw:
        if len(raw) < 4:
            # Not enough bytes left for a phandle
            _err("bad value for " + repr(prop))
        phandle = to_num(raw[:4])
        raw = raw[4:]

        target = prop.node.dt.phandle2node.get(phandle)
        if not target:
            # Unspecified phandle-array element. This is valid; a 0
            # phandle value followed by no cells is an empty element.
            entries.append(None)
            continue

        if cells_prop_name not in target.props:
            _err(f"{target!r} lacks {cells_prop_name}")

        n_cells = target.props[cells_prop_name].to_num()
        if len(raw) < 4*n_cells:
            _err("missing data after phandle in " + repr(prop))

        entries.append((target, raw[:4*n_cells]))
        raw = raw[4*n_cells:]

    return entries
3187
3188
def _address_cells(node: dtlib_Node) -> int:
    # Number of <u32> cells used to encode the address part of 'node''s
    # 'reg' property, taken from the parent's #address-cells
    if TYPE_CHECKING:
        assert node.parent

    prop = node.parent.props.get("#address-cells")
    if prop is not None:
        return prop.to_num()
    return 2  # Default value per DT spec.
3198
3199
def _size_cells(node: dtlib_Node) -> int:
    # Returns the number of <u32> cells used to encode the size part of
    # 'node's 'reg' property, as given by the parent's #size-cells.
    # Falls back to 1, the default from the devicetree specification.
    if TYPE_CHECKING:
        assert node.parent

    cells_prop = node.parent.props.get("#size-cells")
    return cells_prop.to_num() if cells_prop is not None else 1
3209
3210
def _interrupt_cells(node: dtlib_Node) -> int:
    # Returns the value of the #interrupt-cells property on 'node' itself
    # (not on the parent, unlike _address_cells/_size_cells), erroring out
    # if 'node' has no #interrupt-cells property

    cells_prop = node.props.get("#interrupt-cells")
    if cells_prop is None:
        _err(f"{node!r} lacks #interrupt-cells")
    return cells_prop.to_num()
3218
3219
def _slice(node: dtlib_Node,
           prop_name: str,
           size: int,
           size_hint: str) -> list[bytes]:
    # Thin wrapper around _slice_helper that makes failures surface as
    # EDTError (edtlib's exception type) rather than a dtlib error.
    # NOTE(review): _slice_helper is defined elsewhere; presumably it splits
    # the value of 'node's 'prop_name' property into 'size'-byte chunks and
    # uses 'size_hint' in error messages — confirm at its definition.
    return _slice_helper(node, prop_name, size, size_hint, EDTError)
3225
3226
def _check_dt(dt: DT) -> None:
    # Runs edtlib-specific sanity checks over the whole devicetree. dtlib
    # itself is deliberately permissive (except for special properties like
    # phandle), so stricter conventions are enforced here instead.
    #
    # Checks that every 'status' property has a value from the devicetree
    # specification ("ok" is also accepted for backwards compatibility),
    # and that every 'ranges' property is either empty or a list of numbers.

    ok_status = {"ok", "okay", "disabled", "reserved", "fail", "fail-sss"}

    for node in dt.node_iter():
        status = node.props.get("status")
        if status is not None:
            try:
                status_val = status.to_string()
            except DTError as e:
                # The error message gives the path
                _err(str(e))

            if status_val not in ok_status:
                _err(f"unknown 'status' value \"{status_val}\" in {node.path} "
                     f"in {node.dt.filename}, expected one of " +
                     ", ".join(ok_status) +
                     " (see the devicetree specification)")

        ranges_prop = node.props.get("ranges")
        if ranges_prop and ranges_prop.type not in (Type.EMPTY, Type.NUMS):
            _err(f"expected 'ranges = < ... >;' in {node.path} in "
                 f"{node.dt.filename}, not '{ranges_prop}' "
                 "(see the devicetree specification)")
3257
3258
def _err(msg) -> NoReturn:
    # Raises an EDTError with the given message. The NoReturn annotation
    # lets type checkers know that control never continues past a call.
    raise EDTError(msg)
3261
# Module-level logger, named after this module so clients can configure
# edtlib logging independently of their own
_LOG = logging.getLogger(__name__)
3264
# Matches a single character that is not alphanumeric or an underscore.
# re.ASCII restricts \W to the ASCII definition, so non-ASCII letters are
# also treated as "special" and get replaced by str_as_token().
_NOT_ALPHANUM_OR_UNDERSCORE = re.compile(r'\W', re.ASCII)


def str_as_token(val: str) -> str:
    """Return a canonical representation of a string as a C token.

    Every character of 'val' that is not an ASCII letter, digit, or
    underscore is replaced with an underscore, and the result is
    returned."""

    return _NOT_ALPHANUM_OR_UNDERSCORE.sub('_', val)
3276
3277
# Custom PyYAML binding loader class to avoid modifying yaml.Loader directly,
# which could interfere with YAML loading in clients
class _BindingLoader(Loader):
    pass


# Add legacy '!include foo.yaml' handling: registers _binding_include as the
# constructor run for '!include' tags when loading bindings with this loader
_BindingLoader.add_constructor("!include", _binding_include)
3286
3287#
3288# "Default" binding for properties which are defined by the spec.
3289#
3290# Zephyr: do not change the _DEFAULT_PROP_TYPES keys without
3291# updating the documentation for the DT_PROP() macro in
3292# include/devicetree.h.
3293#
3294
3295_DEFAULT_PROP_TYPES: dict[str, str] = {
3296    "compatible": "string-array",
3297    "status": "string",
3298    "ranges": "compound",  # NUMS or EMPTY
3299    "reg": "array",
3300    "reg-names": "string-array",
3301    "label": "string",
3302    "interrupts": "array",
3303    "interrupts-extended": "compound",
3304    "interrupt-names": "string-array",
3305    "interrupt-controller": "boolean",
3306}
3307
3308_STATUS_ENUM: list[str] = "ok okay disabled reserved fail fail-sss".split()
3309
3310def _raw_default_property_for(
3311        name: str
3312) -> dict[str, Union[str, bool, list[str]]]:
3313    ret: dict[str, Union[str, bool, list[str]]] = {
3314        'type': _DEFAULT_PROP_TYPES[name],
3315        'required': False,
3316    }
3317    if name == 'status':
3318        ret['enum'] = _STATUS_ENUM
3319    return ret
3320
# Synthetic Binding covering the spec-defined default properties in
# _DEFAULT_PROP_TYPES. It has no associated binding file or compatible,
# hence require_compatible=False and require_description=False.
# NOTE(review): the first two positional arguments are presumably
# path=None and an empty fname2path mapping — confirm against
# Binding.__init__.
_DEFAULT_PROP_BINDING: Binding = Binding(
    None, {},
    raw={
        'properties': {
            name: _raw_default_property_for(name)
            for name in _DEFAULT_PROP_TYPES
        },
    },
    require_compatible=False, require_description=False,
)
3331
# PropertySpec instances for the spec-defined default properties, keyed
# by property name and backed by _DEFAULT_PROP_BINDING.
# NOTE(review): presumably used as a fallback where a node's own binding
# does not describe one of these properties — confirm at use sites.
_DEFAULT_PROP_SPECS: dict[str, PropertySpec] = {
    name: PropertySpec(name, _DEFAULT_PROP_BINDING)
    for name in _DEFAULT_PROP_TYPES
}
3336