1# Copyright (c) 2019 Nordic Semiconductor ASA
2# Copyright (c) 2019 Linaro Limited
3# SPDX-License-Identifier: BSD-3-Clause
4
5# Tip: You can view just the documentation with 'pydoc3 devicetree.edtlib'
6
7"""
8Library for working with devicetrees at a higher level compared to dtlib. Like
9dtlib, this library presents a tree of devicetree nodes, but the nodes are
10augmented with information from bindings and include some interpretation of
11properties. Some of this interpretation is based on conventions established
12by the Linux kernel, so the Documentation/devicetree/bindings in the Linux
13source code is sometimes good reference material.
14
15Bindings are YAML files that describe devicetree nodes. Devicetree
16nodes are usually mapped to bindings via their 'compatible = "..."' property,
17but a binding can also come from a 'child-binding:' key in the binding for the
18parent devicetree node.
19
20Each devicetree node (dtlib.Node) gets a corresponding edtlib.Node instance,
21which has all the information related to the node.
22
23The top-level entry points for the library are the EDT and Binding classes.
24See their constructor docstrings for details. There is also a
25bindings_from_paths() helper function.
26"""
27
28# NOTE: tests/test_edtlib.py is the test suite for this library.
29
30# Implementation notes
31# --------------------
32#
33# A '_' prefix on an identifier in Python is a convention for marking it private.
34# Please do not access private things. Instead, think of what API you need, and
35# add it.
36#
37# This module is not meant to have any global state. It should be possible to
38# create several EDT objects with independent binding paths and flags. If you
39# need to add a configuration parameter or the like, store it in the EDT
40# instance, and initialize it e.g. with a constructor argument.
41#
42# This library is layered on top of dtlib, and is not meant to expose it to
43# clients. This keeps the header generation script simple.
44#
45# General biased advice:
46#
47# - Consider using @property for APIs that don't need parameters. It makes
48#   functions look like attributes, which is less awkward in clients, and makes
49#   it easy to switch back and forth between variables and functions.
50#
51# - Think about the data type of the thing you're exposing. Exposing something
52#   as e.g. a list or a dictionary is often nicer and more flexible than adding
53#   a function.
54#
55# - Avoid get_*() prefixes on functions. Name them after the thing they return
56#   instead. This often makes the code read more naturally in callers.
57#
58#   Also, consider using @property instead of get_*().
59#
60# - Don't expose dtlib stuff directly.
61#
62# - Add documentation for any new APIs you add.
63#
64#   The convention here is that docstrings (quoted strings) are used for public
65#   APIs, and "doc comments" for internal functions.
66#
67#   @properties are documented in the class docstring, as if they were
68#   variables. See the existing @properties for a template.
69
70from collections import defaultdict
71from copy import deepcopy
72from dataclasses import dataclass
73from typing import (Any, Callable, Dict, Iterable, List, NoReturn,
74                    Optional, Set, TYPE_CHECKING, Tuple, Union)
75import logging
76import os
77import re
78
79import yaml
80try:
81    # Use the C LibYAML parser if available, rather than the Python parser.
82    # This makes e.g. gen_defines.py more than twice as fast.
83    from yaml import CLoader as Loader
84except ImportError:
85    from yaml import Loader     # type: ignore
86
87from devicetree.dtlib import DT, DTError, to_num, to_nums, Type
88from devicetree.dtlib import Node as dtlib_Node
89from devicetree.dtlib import Property as dtlib_Property
90from devicetree.grutils import Graph
91from devicetree._private import _slice_helper
92
93#
94# Public classes
95#
96
97
class Binding:
    """
    Represents a parsed binding.

    These attributes are available on Binding objects:

    path:
      The absolute path to the file defining the binding.

    description:
      The free-form description of the binding, or None.

    compatible:
      The compatible string the binding matches.

      This may be None. For example, it's None when the Binding is inferred
      from node properties. It can also be None for Binding objects created
      using 'child-binding:' with no compatible.

    prop2specs:
      A dict mapping property names to PropertySpec objects
      describing those properties' values.

    specifier2cells:
      A dict that maps specifier space names (like "gpio",
      "clock", "pwm", etc.) to lists of cell names.

      For example, if the binding YAML contains 'pin' and 'flags' cell names
      for the 'gpio' specifier space, like this:

          gpio-cells:
          - pin
          - flags

      Then the Binding object will have a 'specifier2cells' attribute mapping
      "gpio" to ["pin", "flags"]. A missing key should be interpreted as zero
      cells.

    raw:
      The binding as an object parsed from YAML.

    bus:
      If nodes with this binding's 'compatible' describe a bus, a string
      describing the bus type (like "i2c") or a list describing supported
      protocols (like ["i3c", "i2c"]). None otherwise.

      Note that this is the raw value from the binding where it can be
      a string or a list. Use "buses" instead unless you need the raw
      value, where "buses" is always a list.

    buses:
      Derived property from 'bus' where 'buses' is a list of bus(es),
      for example, ["i2c"] or ["i3c", "i2c"]. Or an empty list if there is
      no 'bus:' in this binding.

    on_bus:
      If nodes with this binding's 'compatible' appear on a bus, a string
      describing the bus type (like "i2c"). None otherwise.

    child_binding:
      If this binding describes the properties of child nodes, then
      this is a Binding object for those children; it is None otherwise.
      A Binding object's 'child_binding.child_binding' is not None if there
      are multiple levels of 'child-binding' descriptions in the binding.
    """

    def __init__(self, path: Optional[str], fname2path: Dict[str, str],
                 raw: Any = None, require_compatible: bool = True,
                 require_description: bool = True):
        """
        Binding constructor.

        path:
          Path to binding YAML file. May be None.

        fname2path:
          Map from include files to their absolute paths. Must
          not be None, but may be empty.

        raw:
          Optional raw content in the binding.
          This does not have to have any "include:" lines resolved.
          May be left out, in which case 'path' is opened and read.
          This can be used to resolve child bindings, for example.

        require_compatible:
          If True, it is an error if the binding does not contain a
          "compatible:" line. If False, a missing "compatible:" is
          not an error. Either way, "compatible:" must be a string
          if it is present in the binding.

        require_description:
          If True, it is an error if the binding does not contain a
          "description:" line. If False, a missing "description:" is
          not an error. Either way, "description:" must be a string
          if it is present in the binding.
        """
        self.path: Optional[str] = path
        self._fname2path: Dict[str, str] = fname2path

        if raw is None:
            if path is None:
                _err("you must provide either a 'path' or a 'raw' argument")
            with open(path, encoding="utf-8") as f:
                raw = yaml.load(f, Loader=_BindingLoader)

        # Merge any included files into self.raw. This also pulls in
        # inherited child binding definitions, so it has to be done
        # before initializing those.
        self.raw: dict = self._merge_includes(raw, self.path)

        # Recursively initialize any child bindings. These don't
        # require a 'compatible' or 'description' to be well defined,
        # but they must be dicts.
        if "child-binding" in raw:
            if not isinstance(raw["child-binding"], dict):
                _err(f"malformed 'child-binding:' in {self.path}, "
                     "expected a binding (dictionary with keys/values)")
            self.child_binding: Optional['Binding'] = Binding(
                path, fname2path,
                raw=raw["child-binding"],
                require_compatible=False,
                require_description=False)
        else:
            self.child_binding = None

        # Make sure this is a well defined object.
        self._check(require_compatible, require_description)

        # Initialize look up tables.
        self.prop2specs: Dict[str, 'PropertySpec'] = {}
        for prop_name in self.raw.get("properties", {}).keys():
            self.prop2specs[prop_name] = PropertySpec(prop_name, self)
        self.specifier2cells: Dict[str, List[str]] = {}
        for key, val in self.raw.items():
            if key.endswith("-cells"):
                self.specifier2cells[key[:-len("-cells")]] = val

    def __repr__(self) -> str:
        if self.compatible:
            compat = f" for compatible '{self.compatible}'"
        else:
            compat = ""
        basename = os.path.basename(self.path or "")
        return f"<Binding {basename}" + compat + ">"

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('description')

    @property
    def compatible(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('compatible')

    @property
    def bus(self) -> Union[None, str, List[str]]:
        "See the class docstring"
        return self.raw.get('bus')

    @property
    def buses(self) -> List[str]:
        "See the class docstring"
        if self.raw.get('bus') is not None:
            return self._buses
        else:
            return []

    @property
    def on_bus(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('on-bus')

    def _merge_includes(self, raw: dict, binding_path: Optional[str]) -> dict:
        # Constructor helper. Merges included files in
        # 'raw["include"]' into 'raw' using 'self._include_paths' as a
        # source of include files, removing the "include" key while
        # doing so.
        #
        # This treats 'binding_path' as the binding file being built up
        # and uses it for error messages.

        if "include" not in raw:
            return raw

        include = raw.pop("include")

        # First, merge the included files together. If more than one included
        # file has a 'required:' for a particular property, OR the values
        # together, so that 'required: true' wins.

        merged: Dict[str, Any] = {}

        if isinstance(include, str):
            # Simple scalar string case
            _merge_props(merged, self._load_raw(include), None, binding_path,
                         False)
        elif isinstance(include, list):
            # List of strings and maps. These types may be intermixed.
            for elem in include:
                if isinstance(elem, str):
                    _merge_props(merged, self._load_raw(elem), None,
                                 binding_path, False)
                elif isinstance(elem, dict):
                    name = elem.pop('name', None)
                    allowlist = elem.pop('property-allowlist', None)
                    blocklist = elem.pop('property-blocklist', None)
                    child_filter = elem.pop('child-binding', None)

                    if elem:
                        # We've popped out all the valid keys.
                        _err(f"'include:' in {binding_path} should not have "
                             f"these unexpected contents: {elem}")

                    _check_include_dict(name, allowlist, blocklist,
                                        child_filter, binding_path)

                    contents = self._load_raw(name)

                    _filter_properties(contents, allowlist, blocklist,
                                       child_filter, binding_path)
                    _merge_props(merged, contents, None, binding_path, False)
                else:
                    _err(f"all elements in 'include:' in {binding_path} "
                         "should be either strings or maps with a 'name' key "
                         "and optional 'property-allowlist' or "
                         f"'property-blocklist' keys, but got: {elem}")
        else:
            # Invalid item.
            _err(f"'include:' in {binding_path} "
                 f"should be a string or list, but has type {type(include)}")

        # Next, merge the merged included files into 'raw'. Error out if
        # 'raw' has 'required: false' while the merged included files have
        # 'required: true'.

        _merge_props(raw, merged, None, binding_path, check_required=True)

        return raw

    def _load_raw(self, fname: str) -> dict:
        # Returns the contents of the binding given by 'fname' after merging
        # any bindings it lists in 'include:' into it. 'fname' is just the
        # basename of the file, so we check that there aren't multiple
        # candidates.

        path = self._fname2path.get(fname)

        if not path:
            _err(f"'{fname}' not found")

        with open(path, encoding="utf-8") as f:
            contents = yaml.load(f, Loader=_BindingLoader)
            if not isinstance(contents, dict):
                _err(f'{path}: invalid contents, expected a mapping')

        return self._merge_includes(contents, path)

    def _check(self, require_compatible: bool, require_description: bool):
        # Does sanity checking on the binding.

        raw = self.raw

        if "compatible" in raw:
            compatible = raw["compatible"]
            if not isinstance(compatible, str):
                _err(f"malformed 'compatible: {compatible}' "
                     f"field in {self.path} - "
                     f"should be a string, not {type(compatible).__name__}")
        elif require_compatible:
            _err(f"missing 'compatible' in {self.path}")

        if "description" in raw:
            description = raw["description"]
            if not isinstance(description, str) or not description:
                _err(f"malformed or empty 'description' in {self.path}")
        elif require_description:
            _err(f"missing 'description' in {self.path}")

        # Allowed top-level keys. The 'include' key should have been
        # removed by _load_raw() already.
        ok_top = {"description", "compatible", "bus", "on-bus",
                  "properties", "child-binding"}

        # Descriptive errors for legacy bindings.
        legacy_errors = {
            "#cells": "expected *-cells syntax",
            "child": "use 'bus: <bus>' instead",
            "child-bus": "use 'bus: <bus>' instead",
            "parent": "use 'on-bus: <bus>' instead",
            "parent-bus": "use 'on-bus: <bus>' instead",
            "sub-node": "use 'child-binding' instead",
            "title": "use 'description' instead",
        }

        for key in raw:
            if key in legacy_errors:
                _err(f"legacy '{key}:' in {self.path}, {legacy_errors[key]}")

            if key not in ok_top and not key.endswith("-cells"):
                # Note: this must be an f-string so that the allowed keys
                # are interpolated into the error message.
                _err(f"unknown key '{key}' in {self.path}, "
                     f"expected one of {', '.join(ok_top)}, or *-cells")

        if "bus" in raw:
            bus = raw["bus"]
            # Accept a single bus name or a list of bus names. The 'or'
            # (rather than 'and') on the second line is what rejects a
            # list with non-string elements, and it short-circuits so a
            # non-list value is never iterated by all().
            if (not isinstance(bus, str) and
               (not isinstance(bus, list) or
                not all(isinstance(elem, str) for elem in bus))):
                _err(f"malformed 'bus:' value in {self.path}, "
                     "expected string or list of strings")

            if isinstance(bus, list):
                self._buses = bus
            else:
                # Convert bus into a list
                self._buses = [bus]

        if ("on-bus" in raw
            and not isinstance(raw["on-bus"], str)):
            _err(f"malformed 'on-bus:' value in {self.path}, "
                 "expected string")

        self._check_properties()

        for key, val in raw.items():
            if key.endswith("-cells"):
                if (not isinstance(val, list)
                    or not all(isinstance(elem, str) for elem in val)):
                    _err(f"malformed '{key}:' in {self.path}, "
                         "expected a list of strings")

    def _check_properties(self) -> None:
        # _check() helper for checking the contents of 'properties:'.

        raw = self.raw

        if "properties" not in raw:
            return

        ok_prop_keys = {"description", "type", "required",
                        "enum", "const", "default", "deprecated",
                        "specifier-space"}

        for prop_name, options in raw["properties"].items():
            for key in options:
                if key not in ok_prop_keys:
                    _err(f"unknown setting '{key}' in "
                         f"'properties: {prop_name}: ...' in {self.path}, "
                         f"expected one of {', '.join(ok_prop_keys)}")

            _check_prop_by_type(prop_name, options, self.path)

            for true_false_opt in ["required", "deprecated"]:
                if true_false_opt in options:
                    option = options[true_false_opt]
                    if not isinstance(option, bool):
                        _err(f"malformed '{true_false_opt}:' setting '{option}' "
                             f"for '{prop_name}' in 'properties' in {self.path}, "
                             "expected true/false")

            if options.get("deprecated") and options.get("required"):
                _err(f"'{prop_name}' in 'properties' in {self.path} should not "
                      "have both 'deprecated' and 'required' set")

            if ("description" in options
                and not isinstance(options["description"], str)):
                _err("missing, malformed, or empty 'description' for "
                     f"'{prop_name}' in 'properties' in {self.path}")

            if "enum" in options and not isinstance(options["enum"], list):
                _err(f"enum in {self.path} for property '{prop_name}' "
                     "is not a list")
472
class PropertySpec:
    """
    Represents a "property specification", i.e. the description of a
    property provided by a binding file, like its type and description.

    These attributes are available on PropertySpec objects:

    binding:
      The Binding object which defined this property.

    name:
      The property's name.

    path:
      The file where this property was defined. In case a binding includes
      other bindings, this is the file where the property was last modified.

    type:
      The type of the property as a string, as given in the binding.

    description:
      The free-form description of the property as a string, or None.

    enum:
      A list of values the property may take as given in the binding, or None.

    enum_tokenizable:
      True if enum is not None and all the values in it are tokenizable;
      False otherwise.

      A property must have string or string-array type and an "enum:" in its
      binding to be tokenizable. Additionally, the "enum:" values must be
      unique after converting all non-alphanumeric characters to underscores
      (so "foo bar" and "foo_bar" in the same "enum:" would not be
      tokenizable).

    enum_upper_tokenizable:
      Like 'enum_tokenizable', with the additional restriction that the
      "enum:" values must be unique after uppercasing and converting
      non-alphanumeric characters to underscores.

    const:
      The property's constant value as given in the binding, or None.

    default:
      The property's default value as given in the binding, or None.

    deprecated:
      True if the property is deprecated; False otherwise.

    required:
      True if the property is marked required; False otherwise.

    specifier_space:
      The specifier space for the property as given in the binding, or None.
    """

    def __init__(self, name: str, binding: 'Binding'):
        self.binding: 'Binding' = binding
        self.name: str = name
        # The raw 'properties: <name>: ...' mapping from the binding.
        self._raw: Dict[str, Any] = binding.raw["properties"][name]

    def __repr__(self) -> str:
        return f"<PropertySpec {self.name} type '{self.type}'>"

    @property
    def path(self) -> Optional[str]:
        "See the class docstring"
        return self.binding.path

    @property
    def type(self) -> str:
        "See the class docstring"
        return self._raw["type"]

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("description")

    @property
    def enum(self) -> Optional[list]:
        "See the class docstring"
        return self._raw.get("enum")

    @property
    def enum_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily and cached on first access.
        try:
            return self._enum_tokenizable
        except AttributeError:
            pass

        tokenizable = False
        if self.type in ('string', 'string-array') and self.enum is not None:
            # Cache '_as_tokens' so enum_upper_tokenizable can reuse it.
            self._as_tokens = [
                re.sub(_NOT_ALPHANUM_OR_UNDERSCORE, '_', value)
                for value in self.enum
            ]
            # Tokenizable iff the tokenized values are all distinct.
            tokenizable = len(set(self._as_tokens)) == len(self._as_tokens)

        self._enum_tokenizable = tokenizable
        return tokenizable

    @property
    def enum_upper_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily and cached on first access.
        try:
            return self._enum_upper_tokenizable
        except AttributeError:
            pass

        result = (self.enum_tokenizable
                  and len(self._as_tokens)
                      == len({token.upper() for token in self._as_tokens}))
        self._enum_upper_tokenizable = result
        return result

    @property
    def const(self) -> Union[None, int, List[int], str, List[str]]:
        "See the class docstring"
        return self._raw.get("const")

    @property
    def default(self) -> Union[None, int, List[int], str, List[str]]:
        "See the class docstring"
        return self._raw.get("default")

    @property
    def required(self) -> bool:
        "See the class docstring"
        return self._raw.get("required", False)

    @property
    def deprecated(self) -> bool:
        "See the class docstring"
        return self._raw.get("deprecated", False)

    @property
    def specifier_space(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("specifier-space")
612
# The possible Python types of a Property's 'val' attribute, depending on
# the property's 'type:' in the binding (see the Property class docstring):
# int/string and arrays thereof, a Node (phandle/path), a list of Nodes
# (phandles), a list of ControllerAndData entries (phandle-array), bytes
# (uint8-array), or None. 'Node' and 'ControllerAndData' are forward
# references since those classes are defined later in this module.
PropertyValType = Union[int, str,
                        List[int], List[str],
                        'Node', List['Node'],
                        List[Optional['ControllerAndData']],
                        bytes, None]
618
619
@dataclass
class Property:
    """
    Represents a property on a Node, as set in its DT node and with
    additional info from the 'properties:' section of the binding.

    Only properties mentioned in 'properties:' get created. Properties of type
    'compound' currently do not get Property instances, as it's not clear
    what to generate for them.

    These attributes are available on Property objects. Several are
    just convenience accessors for attributes on the PropertySpec object
    accessible via the 'spec' attribute.

    These attributes are available on Property objects:

    spec:
      The PropertySpec object which specifies this property.

    val:
      The value of the property, with the format determined by spec.type,
      which comes from the 'type:' string in the binding.

        - For 'type: int/array/string/string-array', 'val' is what you'd expect
          (a Python integer or string, or a list of them)

        - For 'type: uint8-array', 'val' is a bytes object

        - For 'type: phandle' and 'type: path', 'val' is the pointed-to Node
          instance

        - For 'type: phandles', 'val' is a list of the pointed-to Node
          instances

        - For 'type: phandle-array', 'val' is a list of ControllerAndData
          instances. See the documentation for that class.

    node:
      The Node instance the property is on

    name:
      Convenience for spec.name.

    description:
      Convenience for spec.description with leading and trailing whitespace
      (including newlines) removed. May be None.

    type:
      Convenience for spec.type.

    val_as_tokens:
      The value of the property as a list of tokens, i.e. with non-alphanumeric
      characters replaced with underscores. This is only safe to access
      if 'spec.enum_tokenizable' returns True.

    enum_indices:
      A list of indices of 'val' in 'spec.enum' (which comes from the 'enum:'
      list in the binding), or None if spec.enum is None.
    """

    spec: PropertySpec
    val: PropertyValType
    node: 'Node'

    @property
    def name(self) -> str:
        "See the class docstring"
        return self.spec.name

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        desc = self.spec.description
        return desc.strip() if desc else None

    @property
    def type(self) -> str:
        "See the class docstring"
        return self.spec.type

    @property
    def val_as_tokens(self) -> List[str]:
        "See the class docstring"
        # Scalar values are handled uniformly by wrapping them in a list.
        values = self.val if isinstance(self.val, list) else [self.val]
        tokens = []
        for value in values:
            assert isinstance(value, str)
            tokens.append(str_as_token(value))
        return tokens

    @property
    def enum_indices(self) -> Optional[List[int]]:
        "See the class docstring"
        enum = self.spec.enum
        if not enum:
            return None
        values = self.val if isinstance(self.val, list) else [self.val]
        return [enum.index(value) for value in values]
714
715
@dataclass
class Register:
    """
    Represents a register on a node.

    These attributes are available on Register objects:

    node:
      The Node instance this register is from

    name:
      The name of the register as given in the 'reg-names' property, or None if
      there is no 'reg-names' property

    addr:
      The starting address of the register, in the parent address space, or None
      if #address-cells is zero. Any 'ranges' properties are taken into account.

    size:
      The length of the register in bytes, or None if the register has no
      size (e.g. when #size-cells is zero)
    """

    node: 'Node'           # node whose 'reg' property this entry comes from
    name: Optional[str]    # from 'reg-names', if present
    addr: Optional[int]    # translated through any parent 'ranges'
    size: Optional[int]    # None when no size is available
742
743
@dataclass
class Range:
    """
    Represents a translation range on a node as described by the 'ranges' property.

    These attributes are available on Range objects:

    node:
      The Node instance this range is from

    child_bus_cells:
      The number of cells used to describe a child bus address.

    child_bus_addr:
      A physical address within the child bus address space, or None if the
      child's #address-cells equals 0.

    parent_bus_cells:
      The number of cells used to describe a parent bus address.

    parent_bus_addr:
      A physical address within the parent bus address space, or None if the
      parent's #address-cells equals 0.

    length_cells:
      The number of cells used to describe the size of range in
      the child's address space.

    length:
      The size of the range in the child address space, or None if the
      child's #size-cells equals 0.
    """
    node: 'Node'                     # node whose 'ranges' property this is from
    child_bus_cells: int             # child address width, in cells
    child_bus_addr: Optional[int]    # None when child #address-cells is 0
    parent_bus_cells: int            # parent address width, in cells
    parent_bus_addr: Optional[int]   # None when parent #address-cells is 0
    length_cells: int                # range size width, in cells
    length: Optional[int]            # None when child #size-cells is 0
783
784
@dataclass
class ControllerAndData:
    """
    Represents an entry in an 'interrupts' or 'type: phandle-array' property
    value, e.g. <&ctrl-1 4 0> in

        cs-gpios = <&ctrl-1 4 0 &ctrl-2 3 4>;

    These attributes are available on ControllerAndData objects:

    node:
      The Node instance the property appears on

    controller:
      The Node instance for the controller (e.g. the controller the interrupt
      gets sent to for interrupts)

    data:
      A dictionary that maps names from the *-cells key in the binding for the
      controller to data values, e.g. {"pin": 4, "flags": 0} for the example
      above.

      'interrupts = <1 2>' might give {"irq": 1, "level": 2}.

    name:
      The name of the entry as given in
      'interrupt-names'/'gpio-names'/'pwm-names'/etc., or None if there is no
      *-names property

    basename:
      Basename for the controller when supporting named cells
    """
    node: 'Node'               # node the property appears on
    controller: 'Node'         # controller node the entry points at
    data: dict                 # cell name -> cell value, per the binding
    name: Optional[str]        # from the matching *-names property, if any
    basename: Optional[str]    # specifier space basename for named cells
822
823
@dataclass
class PinCtrl:
    """
    Represents a pin control configuration for a set of pins on a device,
    e.g. pinctrl-0 or pinctrl-1.

    These attributes are available on PinCtrl objects:

    node:
      The Node instance the pinctrl-* property is on

    name:
      The name of the configuration, as given in pinctrl-names, or None if
      there is no pinctrl-names property

    name_as_token:
      Like 'name', but with non-alphanumeric characters converted to underscores.

    conf_nodes:
      A list of Node instances for the pin configuration nodes, e.g.
      the nodes pointed at by &state_1 and &state_2 in

          pinctrl-0 = <&state_1 &state_2>;
    """

    node: 'Node'
    name: Optional[str]
    conf_nodes: List['Node']

    @property
    def name_as_token(self):
        "See the class docstring"
        # No name means no token either.
        if self.name is None:
            return None
        return str_as_token(self.name)
857
858
859class Node:
860    """
861    Represents a devicetree node, augmented with information from bindings, and
862    with some interpretation of devicetree properties. There's a one-to-one
863    correspondence between devicetree nodes and Nodes.
864
865    These attributes are available on Node objects:
866
867    edt:
868      The EDT instance this node is from
869
870    name:
871      The name of the node
872
873    unit_addr:
874      An integer with the ...@<unit-address> portion of the node name,
875      translated through any 'ranges' properties on parent nodes, or None if
876      the node name has no unit-address portion. PCI devices use a different
877      node name format ...@<dev>,<func> or ...@<dev> (e.g. "pcie@1,0"), in
878      this case None is returned.
879
880    description:
881      The description string from the binding for the node, or None if the node
882      has no binding. Leading and trailing whitespace (including newlines) is
883      removed.
884
885    path:
886      The devicetree path of the node
887
888    label:
889      The text from the 'label' property on the node, or None if the node has
890      no 'label'
891
892    labels:
893      A list of all of the devicetree labels for the node, in the same order
894      as the labels appear, but with duplicates removed.
895
896      This corresponds to the actual devicetree source labels, unlike the
897      "label" attribute, which is the value of a devicetree property named
898      "label".
899
900    parent:
901      The Node instance for the devicetree parent of the Node, or None if the
902      node is the root node
903
904    children:
905      A dictionary with the Node instances for the devicetree children of the
906      node, indexed by name
907
908    dep_ordinal:
909      A non-negative integer value such that the value for a Node is
910      less than the value for all Nodes that depend on it.
911
912      The ordinal is defined for all Nodes, and is unique among nodes in its
913      EDT 'nodes' list.
914
915    required_by:
916      A list with the nodes that directly depend on the node
917
918    depends_on:
919      A list with the nodes that the node directly depends on
920
921    status:
922      The node's status property value, as a string, or "okay" if the node
923      has no status property set. If the node's status property is "ok",
924      it is converted to "okay" for consistency.
925
926    read_only:
927      True if the node has a 'read-only' property, and False otherwise
928
929    matching_compat:
930      The 'compatible' string for the binding that matched the node, or None if
931      the node has no binding
932
933    binding_path:
934      The path to the binding file for the node, or None if the node has no
935      binding
936
937    compats:
938      A list of 'compatible' strings for the node, in the same order that
939      they're listed in the .dts file
940
    ranges:
      A list of Range objects extracted from the node's 'ranges' property.
      The list is empty if the node does not have a 'ranges' property.
944
945    regs:
946      A list of Register objects for the node's registers
947
948    props:
949      A dict that maps property names to Property objects.
950      Property objects are created for all devicetree properties on the node
951      that are mentioned in 'properties:' in the binding.
952
953    aliases:
954      A list of aliases for the node. This is fetched from the /aliases node.
955
956    interrupts:
957      A list of ControllerAndData objects for the interrupts generated by the
958      node. The list is empty if the node does not generate interrupts.
959
960    pinctrls:
961      A list of PinCtrl objects for the pinctrl-<index> properties on the
962      node, sorted by index. The list is empty if the node does not have any
963      pinctrl-<index> properties.
964
965    buses:
966      If the node is a bus node (has a 'bus:' key in its binding), then this
967      attribute holds the list of supported bus types, e.g. ["i2c"], ["spi"]
968      or ["i3c", "i2c"] if multiple protocols are supported via the same bus.
969      If the node is not a bus node, then this attribute is an empty list.
970
971    on_buses:
972      The bus the node appears on, e.g. ["i2c"], ["spi"] or ["i3c", "i2c"] if
973      multiple protocols are supported via the same bus. The bus is determined
974      by searching upwards for a parent node whose binding has a 'bus:' key,
975      returning the value of the first 'bus:' key found. If none of the node's
976      parents has a 'bus:' key, this attribute is an empty list.
977
    bus_node:
      Like on_buses, but contains the Node for the bus controller, or None if
      the node is not on a bus.
981
982    flash_controller:
983      The flash controller for the node. Only meaningful for nodes representing
984      flash partitions.
985
986    spi_cs_gpio:
987      The device's SPI GPIO chip select as a ControllerAndData instance, if it
988      exists, and None otherwise. See
989      Documentation/devicetree/bindings/spi/spi-controller.yaml in the Linux kernel.
990
991    gpio_hogs:
992      A list of ControllerAndData objects for the GPIOs hogged by the node. The
993      list is empty if the node does not hog any GPIOs. Only relevant for GPIO hog
994      nodes.
995
996    is_pci_device:
997      True if the node is a PCI device.
998    """
999
    def __init__(
        self,
        dt_node: dtlib_Node,
        edt: "EDT",
        support_fixed_partitions_on_any_bus: bool = True,
    ):
        '''
        For internal use only; not meant to be used outside edtlib itself.
        '''

        # 'compatible' is optional in the devicetree; default to no compats
        compats = (
            dt_node.props["compatible"].to_strings()
            if "compatible" in dt_node.props
            else []
        )

        # Private, don't touch outside the class:
        self._node: dtlib_Node = dt_node
        self._binding: Optional[Binding] = None

        # Public, some of which are initialized properly later:
        self.edt: 'EDT' = edt
        self.dep_ordinal: int = -1  # placeholder; assigned during dependency ordering
        self.compats: List[str] = compats
        self.ranges: List[Range] = []
        self.regs: List[Register] = []
        self.props: Dict[str, Property] = {}
        self.interrupts: List[ControllerAndData] = []
        self.pinctrls: List[PinCtrl] = []
        # _bus_node() relies on the parent Node already existing, so nodes
        # must be created parents-first
        self.bus_node = self._bus_node(support_fixed_partitions_on_any_bus)

        self._init_binding()
        self._init_regs()
        self._init_ranges()
1034
    @property
    def name(self) -> str:
        "See the class docstring"
        # Forwarded from the underlying dtlib node
        return self._node.name
1039
1040    @property
1041    def unit_addr(self) -> Optional[int]:
1042        "See the class docstring"
1043
1044        # TODO: Return a plain string here later, like dtlib.Node.unit_addr?
1045
1046        # PCI devices use a different node name format (e.g. "pcie@1,0")
1047        if "@" not in self.name or self.is_pci_device:
1048            return None
1049
1050        try:
1051            addr = int(self.name.split("@", 1)[1], 16)
1052        except ValueError:
1053            _err(f"{self!r} has non-hex unit address")
1054
1055        return _translate(addr, self._node)
1056
1057    @property
1058    def description(self) -> Optional[str]:
1059        "See the class docstring."
1060        if self._binding:
1061            return self._binding.description
1062        return None
1063
    @property
    def path(self) -> str:
        "See the class docstring"
        # Forwarded from the underlying dtlib node
        return self._node.path
1068
1069    @property
1070    def label(self) -> Optional[str]:
1071        "See the class docstring"
1072        if "label" in self._node.props:
1073            return self._node.props["label"].to_string()
1074        return None
1075
    @property
    def labels(self) -> List[str]:
        "See the class docstring"
        # Devicetree source labels (e.g. 'foo: node@0 { ... }'), not the
        # 'label' property
        return self._node.labels
1080
1081    @property
1082    def parent(self) -> Optional['Node']:
1083        "See the class docstring"
1084        return self.edt._node2enode.get(self._node.parent) # type: ignore
1085
1086    @property
1087    def children(self) -> Dict[str, 'Node']:
1088        "See the class docstring"
1089        # Could be initialized statically too to preserve identity, but not
1090        # sure if needed. Parent nodes being initialized before their children
1091        # would need to be kept in mind.
1092        return {name: self.edt._node2enode[node]
1093                for name, node in self._node.nodes.items()}
1094
1095    def child_index(self, node) -> int:
1096        """Get the index of *node* in self.children.
1097        Raises KeyError if the argument is not a child of this node.
1098        """
1099        if not hasattr(self, '_child2index'):
1100            # Defer initialization of this lookup table until this
1101            # method is callable to handle parents needing to be
1102            # initialized before their chidlren. By the time we
1103            # return from __init__, 'self.children' is callable.
1104            self._child2index: Dict[str, int] = {}
1105            for index, child_path in enumerate(child.path for child in
1106                                               self.children.values()):
1107                self._child2index[child_path] = index
1108
1109        return self._child2index[node.path]
1110
    @property
    def required_by(self) -> List['Node']:
        "See the class docstring"
        # The dependency graph is maintained by the EDT instance
        return self.edt._graph.required_by(self)
1115
    @property
    def depends_on(self) -> List['Node']:
        "See the class docstring"
        # The dependency graph is maintained by the EDT instance
        return self.edt._graph.depends_on(self)
1120
1121    @property
1122    def status(self) -> str:
1123        "See the class docstring"
1124        status = self._node.props.get("status")
1125
1126        if status is None:
1127            as_string = "okay"
1128        else:
1129            as_string = status.to_string()
1130
1131        if as_string == "ok":
1132            as_string = "okay"
1133
1134        return as_string
1135
    @property
    def read_only(self) -> bool:
        "See the class docstring"
        # Only the presence of the boolean 'read-only' property matters
        return "read-only" in self._node.props
1140
1141    @property
1142    def matching_compat(self) -> Optional[str]:
1143        "See the class docstring"
1144        if self._binding:
1145            return self._binding.compatible
1146        return None
1147
1148    @property
1149    def binding_path(self) -> Optional[str]:
1150        "See the class docstring"
1151        if self._binding:
1152            return self._binding.path
1153        return None
1154
1155    @property
1156    def aliases(self) -> List[str]:
1157        "See the class docstring"
1158        return [alias for alias, node in self._node.dt.alias2node.items()
1159                if node is self._node]
1160
1161    @property
1162    def buses(self) -> List[str]:
1163        "See the class docstring"
1164        if self._binding:
1165            return self._binding.buses
1166        return []
1167
1168    @property
1169    def on_buses(self) -> List[str]:
1170        "See the class docstring"
1171        bus_node = self.bus_node
1172        return bus_node.buses if bus_node else []
1173
1174    @property
1175    def flash_controller(self) -> 'Node':
1176        "See the class docstring"
1177
1178        # The node path might be something like
1179        # /flash-controller@4001E000/flash@0/partitions/partition@fc000. We go
1180        # up two levels to get the flash and check its compat. The flash
1181        # controller might be the flash itself (for cases like NOR flashes).
1182        # For the case of 'soc-nv-flash', we assume the controller is the
1183        # parent of the flash node.
1184
1185        if not self.parent or not self.parent.parent:
1186            _err(f"flash partition {self!r} lacks parent or grandparent node")
1187
1188        controller = self.parent.parent
1189        if controller.matching_compat == "soc-nv-flash":
1190            if controller.parent is None:
1191                _err(f"flash controller '{controller.path}' cannot be the root node")
1192            return controller.parent
1193        return controller
1194
1195    @property
1196    def spi_cs_gpio(self) -> Optional[ControllerAndData]:
1197        "See the class docstring"
1198
1199        if not ("spi" in self.on_buses
1200                and self.bus_node
1201                and "cs-gpios" in self.bus_node.props):
1202            return None
1203
1204        if not self.regs:
1205            _err(f"{self!r} needs a 'reg' property, to look up the "
1206                 "chip select index for SPI")
1207
1208        parent_cs_lst = self.bus_node.props["cs-gpios"].val
1209        if TYPE_CHECKING:
1210            assert isinstance(parent_cs_lst, list)
1211
1212        # cs-gpios is indexed by the unit address
1213        cs_index = self.regs[0].addr
1214        if TYPE_CHECKING:
1215            assert isinstance(cs_index, int)
1216
1217        if cs_index >= len(parent_cs_lst):
1218            _err(f"index from 'regs' in {self!r} ({cs_index}) "
1219                 "is >= number of cs-gpios in "
1220                 f"{self.bus_node!r} ({len(parent_cs_lst)})")
1221
1222        ret = parent_cs_lst[cs_index]
1223        if TYPE_CHECKING:
1224            assert isinstance(ret, ControllerAndData)
1225        return ret
1226
1227    @property
1228    def gpio_hogs(self) -> List[ControllerAndData]:
1229        "See the class docstring"
1230
1231        if "gpio-hog" not in self.props:
1232            return []
1233
1234        if not self.parent or not "gpio-controller" in self.parent.props:
1235            _err(f"GPIO hog {self!r} lacks parent GPIO controller node")
1236
1237        if not "#gpio-cells" in self.parent._node.props:
1238            _err(f"GPIO hog {self!r} parent node lacks #gpio-cells")
1239
1240        n_cells = self.parent._node.props["#gpio-cells"].to_num()
1241        res = []
1242
1243        for item in _slice(self._node, "gpios", 4*n_cells,
1244                           f"4*(<#gpio-cells> (= {n_cells})"):
1245            controller = self.parent
1246            res.append(ControllerAndData(
1247                node=self, controller=controller,
1248                data=self._named_cells(controller, item, "gpio"),
1249                name=None, basename="gpio"))
1250
1251        return res
1252
1253    @property
1254    def has_child_binding(self) -> bool:
1255        """
1256        True if the node's binding contains a child-binding definition, False
1257        otherwise
1258        """
1259        return bool(self._binding and self._binding.child_binding)
1260
    @property
    def is_pci_device(self) -> bool:
        "See the class docstring"
        # PCI(e) devices are identified by the bus they appear on
        return 'pcie' in self.on_buses
1265
1266    def __repr__(self) -> str:
1267        if self.binding_path:
1268            binding = "binding " + self.binding_path
1269        else:
1270            binding = "no binding"
1271        return f"<Node {self.path} in '{self.edt.dts_path}', {binding}>"
1272
    def _init_binding(self) -> None:
        # Initializes Node._binding. It holds data from the node's binding file,
        # in the format returned by PyYAML (plain Python lists, dicts, etc.), or
        # None if the node has no binding.

        # This relies on the parent of the node having already been
        # initialized, which is guaranteed by going through the nodes in
        # node_iter() order.

        # Nodes whose path was registered for binding inference get a
        # binding synthesized from their properties instead of a YAML one
        if self.path in self.edt._infer_binding_for_paths:
            self._binding_from_properties()
            return

        if self.compats:
            on_buses = self.on_buses

            for compat in self.compats:
                # When matching, respect the order of the 'compatible' entries,
                # and for each one first try to match against an explicitly
                # specified bus (if any) and then against any bus. This is so
                # that matching against bindings which do not specify a bus
                # works the same way in Zephyr as it does elsewhere.
                binding = None

                for bus in on_buses:
                    if (compat, bus) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, bus]
                        break

                if not binding:
                    if (compat, None) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, None]
                    else:
                        # No binding for this compatible; try the next one
                        continue

                self._binding = binding
                return
        else:
            # No 'compatible' property. See if the parent binding has
            # a compatible. This can come from one or more levels of
            # nesting with 'child-binding:'.

            binding_from_parent = self._binding_from_parent()
            if binding_from_parent:
                self._binding = binding_from_parent
                return

        # No binding found
        self._binding = None
1322
1323    def _binding_from_properties(self) -> None:
1324        # Sets up a Binding object synthesized from the properties in the node.
1325
1326        if self.compats:
1327            _err(f"compatible in node with inferred binding: {self.path}")
1328
1329        # Synthesize a 'raw' binding as if it had been parsed from YAML.
1330        raw: Dict[str, Any] = {
1331            'description': 'Inferred binding from properties, via edtlib.',
1332            'properties': {},
1333        }
1334        for name, prop in self._node.props.items():
1335            pp: Dict[str, str] = {}
1336            if prop.type == Type.EMPTY:
1337                pp["type"] = "boolean"
1338            elif prop.type == Type.BYTES:
1339                pp["type"] = "uint8-array"
1340            elif prop.type == Type.NUM:
1341                pp["type"] = "int"
1342            elif prop.type == Type.NUMS:
1343                pp["type"] = "array"
1344            elif prop.type == Type.STRING:
1345                pp["type"] = "string"
1346            elif prop.type == Type.STRINGS:
1347                pp["type"] = "string-array"
1348            elif prop.type == Type.PHANDLE:
1349                pp["type"] = "phandle"
1350            elif prop.type == Type.PHANDLES:
1351                pp["type"] = "phandles"
1352            elif prop.type == Type.PHANDLES_AND_NUMS:
1353                pp["type"] = "phandle-array"
1354            elif prop.type == Type.PATH:
1355                pp["type"] = "path"
1356            else:
1357                _err(f"cannot infer binding from property: {prop} "
1358                     f"with type {prop.type!r}")
1359            raw['properties'][name] = pp
1360
1361        # Set up Node state.
1362        self.compats = []
1363        self._binding = Binding(None, {}, raw=raw, require_compatible=False)
1364
1365    def _binding_from_parent(self) -> Optional[Binding]:
1366        # Returns the binding from 'child-binding:' in the parent node's
1367        # binding.
1368
1369        if not self.parent:
1370            return None
1371
1372        pbinding = self.parent._binding
1373        if not pbinding:
1374            return None
1375
1376        if pbinding.child_binding:
1377            return pbinding.child_binding
1378
1379        return None
1380
1381    def _bus_node(self, support_fixed_partitions_on_any_bus: bool = True
1382                  ) -> Optional['Node']:
1383        # Returns the value for self.bus_node. Relies on parent nodes being
1384        # initialized before their children.
1385
1386        if not self.parent:
1387            # This is the root node
1388            return None
1389
1390        # Treat 'fixed-partitions' as if they are not on any bus.  The reason is
1391        # that flash nodes might be on a SPI or controller or SoC bus.  Having
1392        # bus be None means we'll always match the binding for fixed-partitions
1393        # also this means want processing the fixed-partitions node we wouldn't
1394        # try to do anything bus specific with it.
1395        if support_fixed_partitions_on_any_bus and "fixed-partitions" in self.compats:
1396            return None
1397
1398        if self.parent.buses:
1399            # The parent node is a bus node
1400            return self.parent
1401
1402        # Same bus node as parent (possibly None)
1403        return self.parent.bus_node
1404
    def _init_crossrefs(
        self, default_prop_types: bool = False, err_on_deprecated: bool = False
    ) -> None:
        # Initializes all properties that require cross-references to other
        # nodes, like 'phandle' and 'phandles'. This is done after all nodes
        # have been initialized.
        #
        # default_prop_types:
        #   Passed to _init_props(); enables the fallback property specs used
        #   for nodes without a binding.
        #
        # err_on_deprecated:
        #   If True, a deprecated property present on the node is an error
        #   instead of a warning.
        self._init_props(
            default_prop_types=default_prop_types, err_on_deprecated=err_on_deprecated
        )
        self._init_interrupts()
        self._init_pinctrls()
1416
1417    def _init_props(self, default_prop_types: bool = False,
1418                    err_on_deprecated: bool = False) -> None:
1419        # Creates self.props. See the class docstring. Also checks that all
1420        # properties on the node are declared in its binding.
1421
1422        self.props = {}
1423
1424        if self._binding:
1425            prop2specs = self._binding.prop2specs
1426        else:
1427            prop2specs = None
1428
1429        # Initialize self.props
1430        if prop2specs:
1431            for prop_spec in prop2specs.values():
1432                self._init_prop(prop_spec, err_on_deprecated)
1433            self._check_undeclared_props()
1434        elif default_prop_types:
1435            for name in self._node.props:
1436                if name not in _DEFAULT_PROP_SPECS:
1437                    continue
1438                prop_spec = _DEFAULT_PROP_SPECS[name]
1439                val = self._prop_val(name, prop_spec, err_on_deprecated)
1440                self.props[name] = Property(prop_spec, val, self)
1441
1442    def _init_prop(self, prop_spec: PropertySpec,
1443                   err_on_deprecated: bool) -> None:
1444        # _init_props() helper for initializing a single property.
1445        # 'prop_spec' is a PropertySpec object from the node's binding.
1446
1447        name = prop_spec.name
1448        prop_type = prop_spec.type
1449        if not prop_type:
1450            _err(f"'{name}' in {self.binding_path} lacks 'type'")
1451
1452        val = self._prop_val(name, prop_spec, err_on_deprecated)
1453
1454        if val is None:
1455            # 'required: false' property that wasn't there, or a property type
1456            # for which we store no data.
1457            return
1458
1459        enum = prop_spec.enum
1460        for subval in val if isinstance(val, list) else [val]:
1461            if enum and subval not in enum:
1462                _err(f"value of property '{name}' on {self.path} in "
1463                    f"{self.edt.dts_path} ({subval!r}) is not in 'enum' list in "
1464                    f"{self.binding_path} ({enum!r})")
1465
1466        const = prop_spec.const
1467        if const is not None and val != const:
1468            _err(f"value of property '{name}' on {self.path} in "
1469                 f"{self.edt.dts_path} ({val!r}) "
1470                 "is different from the 'const' value specified in "
1471                 f"{self.binding_path} ({const!r})")
1472
1473        # Skip properties that start with '#', like '#size-cells', and mapping
1474        # properties like 'gpio-map'/'interrupt-map'
1475        if name[0] == "#" or name.endswith("-map"):
1476            return
1477
1478        self.props[name] = Property(prop_spec, val, self)
1479
    def _prop_val(
        self,
        name: str,
        prop_spec: PropertySpec,
        err_on_deprecated: bool,
    ) -> PropertyValType:
        # _init_prop() helper for getting the property's value
        #
        # name:
        #   Property name from binding
        #
        # prop_spec:
        #   PropertySpec from binding
        #
        # err_on_deprecated:
        #   If True, a deprecated property is an error instead of warning.

        node = self._node
        prop = node.props.get(name)
        binding_path = prop_spec.binding.path
        prop_type = prop_spec.type
        deprecated = prop_spec.deprecated
        required = prop_spec.required
        default = prop_spec.default
        specifier_space = prop_spec.specifier_space

        # Complain (or error out) about deprecated properties that are
        # actually present on the node
        if prop and deprecated:
            msg = (
                f"'{name}' is marked as deprecated in 'properties:' "
                f"in {binding_path} for node {node.path}."
            )
            if err_on_deprecated:
                _err(msg)
            else:
                _LOG.warning(msg)

        if not prop:
            # Missing property. 'required: true' is only enforced for nodes
            # with status "okay".
            if required and self.status == "okay":
                _err(
                    f"'{name}' is marked as required in 'properties:' in "
                    f"{binding_path}, but does not appear in {node!r}"
                )

            if default is not None:
                # YAML doesn't have a native format for byte arrays. We need to
                # convert those from an array like [0x12, 0x34, ...]. The
                # format has already been checked in
                # _check_prop_by_type().
                if prop_type == "uint8-array":
                    return bytes(default) # type: ignore
                return default

            # Missing booleans are False; other missing properties have no
            # stored value
            return False if prop_type == "boolean" else None

        if prop_type == "boolean":
            if prop.type != Type.EMPTY:
                _err(
                    "'{0}' in {1!r} is defined with 'type: boolean' in {2}, "
                    "but is assigned a value ('{3}') instead of being empty "
                    "('{0};')".format(name, node, binding_path, prop)
                )
            return True

        # The remaining types defer to dtlib for the conversion
        if prop_type == "int":
            return prop.to_num()

        if prop_type == "array":
            return prop.to_nums()

        if prop_type == "uint8-array":
            return prop.to_bytes()

        if prop_type == "string":
            return prop.to_string()

        if prop_type == "string-array":
            return prop.to_strings()

        if prop_type == "phandle":
            return self.edt._node2enode[prop.to_node()]

        if prop_type == "phandles":
            return [self.edt._node2enode[node] for node in prop.to_nodes()]

        if prop_type == "phandle-array":
            # This type is a bit high-level for dtlib as it involves
            # information from bindings and *-names properties, so there's no
            # to_phandle_array() in dtlib. Do the type check ourselves.
            if prop.type not in (Type.PHANDLE, Type.PHANDLES, Type.PHANDLES_AND_NUMS):
                _err(f"expected property '{name}' in {node.path} in "
                     f"{node.dt.filename} to be assigned "
                     f"with '{name} = < &foo ... &bar 1 ... &baz 2 3 >' "
                     f"(a mix of phandles and numbers), not '{prop}'")

            return self._standard_phandle_val_list(prop, specifier_space)

        if prop_type == "path":
            return self.edt._node2enode[prop.to_path()]

        # prop_type == "compound". Checking that the 'type:'
        # value is valid is done in _check_prop_by_type().
        #
        # 'compound' is a dummy type for properties that don't fit any of the
        # patterns above, so that we can require all entries in 'properties:'
        # to have a 'type: ...'. No Property object is created for it.
        return None
1586
1587    def _check_undeclared_props(self) -> None:
1588        # Checks that all properties are declared in the binding
1589        wl = {"compatible", "status", "ranges", "phandle",
1590              "interrupt-parent", "interrupts-extended", "device_type"}
1591
1592        for prop_name in self._node.props:
1593            # Allow a few special properties to not be declared in the binding
1594            if (prop_name.endswith("-controller")
1595                or prop_name.startswith("#")
1596                or prop_name in wl):
1597                continue
1598
1599            if TYPE_CHECKING:
1600                assert self._binding
1601
1602            if prop_name not in self._binding.prop2specs:
1603                _err(f"'{prop_name}' appears in {self._node.path} in "
1604                     f"{self.edt.dts_path}, but is not declared in "
1605                     f"'properties:' in {self.binding_path}")
1606
    def _init_ranges(self) -> None:
        # Initializes self.ranges from the node's 'ranges' property, if any
        node = self._node

        self.ranges = []

        if "ranges" not in node.props:
            return

        # Child addresses use this node's #address-cells; parent addresses
        # use the enclosing scope's (see _address_cells())
        raw_child_address_cells = node.props.get("#address-cells")
        parent_address_cells = _address_cells(node)
        if raw_child_address_cells is None:
            child_address_cells = 2 # Default value per DT spec.
        else:
            child_address_cells = raw_child_address_cells.to_num()
        raw_child_size_cells = node.props.get("#size-cells")
        if raw_child_size_cells is None:
            child_size_cells = 1 # Default value per DT spec.
        else:
            child_size_cells = raw_child_size_cells.to_num()

        # Number of cells for one translation 3-tuple in 'ranges'
        entry_cells = child_address_cells + parent_address_cells + child_size_cells

        if entry_cells == 0:
            # An empty 'ranges' is fine (no entries); a non-empty one can't
            # be split into zero-cell entries
            if len(node.props["ranges"].value) == 0:
                return
            else:
                _err(f"'ranges' should be empty in {self._node.path} since "
                     f"<#address-cells> = {child_address_cells}, "
                     f"<#address-cells for parent> = {parent_address_cells} and "
                     f"<#size-cells> = {child_size_cells}")

        # Each cell is 4 bytes, so one entry is 4*entry_cells bytes of raw
        # property data
        for raw_range in _slice(node, "ranges", 4*entry_cells,
                                f"4*(<#address-cells> (= {child_address_cells}) + "
                                "<#address-cells for parent> "
                                f"(= {parent_address_cells}) + "
                                f"<#size-cells> (= {child_size_cells}))"):

            # A zero cell count means the corresponding field is absent
            # (None) rather than zero
            child_bus_cells = child_address_cells
            if child_address_cells == 0:
                child_bus_addr = None
            else:
                child_bus_addr = to_num(raw_range[:4*child_address_cells])
            parent_bus_cells = parent_address_cells
            if parent_address_cells == 0:
                parent_bus_addr = None
            else:
                parent_bus_addr = to_num(
                    raw_range[(4*child_address_cells):
                              (4*child_address_cells + 4*parent_address_cells)])
            length_cells = child_size_cells
            if child_size_cells == 0:
                length = None
            else:
                length = to_num(
                    raw_range[(4*child_address_cells + 4*parent_address_cells):])

            self.ranges.append(Range(self, child_bus_cells, child_bus_addr,
                                     parent_bus_cells, parent_bus_addr,
                                     length_cells, length))
1668
1669    def _init_regs(self) -> None:
1670        # Initializes self.regs
1671
1672        node = self._node
1673
1674        self.regs = []
1675
1676        if "reg" not in node.props:
1677            return
1678
1679        address_cells = _address_cells(node)
1680        size_cells = _size_cells(node)
1681
1682        for raw_reg in _slice(node, "reg", 4*(address_cells + size_cells),
1683                              f"4*(<#address-cells> (= {address_cells}) + "
1684                              f"<#size-cells> (= {size_cells}))"):
1685            if address_cells == 0:
1686                addr = None
1687            else:
1688                addr = _translate(to_num(raw_reg[:4*address_cells]), node)
1689            if size_cells == 0:
1690                size = None
1691            else:
1692                size = to_num(raw_reg[4*address_cells:])
1693            # Size zero is ok for PCI devices
1694            if size_cells != 0 and size == 0 and not self.is_pci_device:
1695                _err(f"zero-sized 'reg' in {self._node!r} seems meaningless "
1696                     "(maybe you want a size of one or #size-cells = 0 "
1697                     "instead)")
1698
1699            # We'll fix up the name when we're done.
1700            self.regs.append(Register(self, None, addr, size))
1701
1702        _add_names(node, "reg", self.regs)
1703
1704    def _init_pinctrls(self) -> None:
1705        # Initializes self.pinctrls from any pinctrl-<index> properties
1706
1707        node = self._node
1708
1709        # pinctrl-<index> properties
1710        pinctrl_props = [prop for name, prop in node.props.items()
1711                         if re.match("pinctrl-[0-9]+", name)]
1712        # Sort by index
1713        pinctrl_props.sort(key=lambda prop: prop.name)
1714
1715        # Check indices
1716        for i, prop in enumerate(pinctrl_props):
1717            if prop.name != "pinctrl-" + str(i):
1718                _err(f"missing 'pinctrl-{i}' property on {node!r} "
1719                     "- indices should be contiguous and start from zero")
1720
1721        self.pinctrls = []
1722        for prop in pinctrl_props:
1723            # We'll fix up the names below.
1724            self.pinctrls.append(PinCtrl(
1725                node=self,
1726                name=None,
1727                conf_nodes=[self.edt._node2enode[node]
1728                            for node in prop.to_nodes()]))
1729
1730        _add_names(node, "pinctrl", self.pinctrls)
1731
1732    def _init_interrupts(self) -> None:
1733        # Initializes self.interrupts
1734
1735        node = self._node
1736
1737        self.interrupts = []
1738
1739        for controller_node, data in _interrupts(node):
1740            # We'll fix up the names below.
1741            controller = self.edt._node2enode[controller_node]
1742            self.interrupts.append(ControllerAndData(
1743                node=self, controller=controller,
1744                data=self._named_cells(controller, data, "interrupt"),
1745                name=None, basename=None))
1746
1747        _add_names(node, "interrupt", self.interrupts)
1748
1749    def _standard_phandle_val_list(
1750            self,
1751            prop: dtlib_Property,
1752            specifier_space: Optional[str]
1753    ) -> List[Optional[ControllerAndData]]:
1754        # Parses a property like
1755        #
1756        #     <prop.name> = <phandle cell phandle cell ...>;
1757        #
1758        # where each phandle points to a controller node that has a
1759        #
1760        #     #<specifier_space>-cells = <size>;
1761        #
1762        # property that gives the number of cells in the value after the
1763        # controller's phandle in the property.
1764        #
1765        # E.g. with a property like
1766        #
1767        #     pwms = <&foo 1 2 &bar 3>;
1768        #
1769        # If 'specifier_space' is "pwm", then we should have this elsewhere
1770        # in the tree:
1771        #
1772        #     foo: ... {
1773        #             #pwm-cells = <2>;
1774        #     };
1775        #
1776        #     bar: ... {
1777        #             #pwm-cells = <1>;
1778        #     };
1779        #
1780        # These values can be given names using the <specifier_space>-names:
1781        # list in the binding for the phandle nodes.
1782        #
1783        # Also parses any
1784        #
1785        #     <specifier_space>-names = "...", "...", ...
1786        #
1787        # Returns a list of Optional[ControllerAndData] instances.
1788        #
1789        # An index is None if the underlying phandle-array element is
1790        # unspecified.
1791
1792        if not specifier_space:
1793            if prop.name.endswith("gpios"):
1794                # There's some slight special-casing for *-gpios properties in that
1795                # e.g. foo-gpios still maps to #gpio-cells rather than
1796                # #foo-gpio-cells
1797                specifier_space = "gpio"
1798            else:
1799                # Strip -s. We've already checked that property names end in -s
1800                # if there is no specifier space in _check_prop_by_type().
1801                specifier_space = prop.name[:-1]
1802
1803        res: List[Optional[ControllerAndData]] = []
1804
1805        for item in _phandle_val_list(prop, specifier_space):
1806            if item is None:
1807                res.append(None)
1808                continue
1809
1810            controller_node, data = item
1811            mapped_controller, mapped_data = (
1812                _map_phandle_array_entry(prop.node, controller_node,
1813                                         data, specifier_space))
1814
1815            controller = self.edt._node2enode[mapped_controller]
1816            # We'll fix up the names below.
1817            res.append(ControllerAndData(
1818                node=self, controller=controller,
1819                data=self._named_cells(controller, mapped_data,
1820                                       specifier_space),
1821                name=None, basename=specifier_space))
1822
1823        _add_names(self._node, specifier_space, res)
1824
1825        return res
1826
1827    def _named_cells(
1828            self,
1829            controller: 'Node',
1830            data: bytes,
1831            basename: str
1832    ) -> Dict[str, int]:
1833        # Returns a dictionary that maps <basename>-cells names given in the
1834        # binding for 'controller' to cell values. 'data' is the raw data, as a
1835        # byte array.
1836
1837        if not controller._binding:
1838            _err(f"{basename} controller {controller._node!r} "
1839                 f"for {self._node!r} lacks binding")
1840
1841        if basename in controller._binding.specifier2cells:
1842            cell_names: List[str] = controller._binding.specifier2cells[basename]
1843        else:
1844            # Treat no *-cells in the binding the same as an empty *-cells, so
1845            # that bindings don't have to have e.g. an empty 'clock-cells:' for
1846            # '#clock-cells = <0>'.
1847            cell_names = []
1848
1849        data_list = to_nums(data)
1850        if len(data_list) != len(cell_names):
1851            _err(f"unexpected '{basename}-cells:' length in binding for "
1852                 f"{controller._node!r} - {len(cell_names)} "
1853                 f"instead of {len(data_list)}")
1854
1855        return dict(zip(cell_names, data_list))
1856
1857
class EDT:
    """
    Represents a devicetree augmented with information from bindings.

    These attributes are available on EDT objects:

    nodes:
      A list of Node objects for the nodes that appear in the devicetree

    compat2nodes:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a list of Nodes with that compatible.
      The collection is sorted so that enabled nodes appear first in the
      collection.

    compat2okay:
      Like compat2nodes, but just for nodes with status 'okay'.

    compat2notokay:
      Like compat2nodes, but just for nodes with status not 'okay'.

    compat2vendor:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a vendor name parsed from vendor_prefixes.

    compat2model:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a model name parsed from that compatible.

    label2node:
      A dict that maps a node label to the node with that label.

    dep_ord2node:
      A dict that maps an ordinal to the node with that dependency ordinal.

    chosen_nodes:
      A dict that maps the properties defined on the devicetree's /chosen
      node to their values. 'chosen' is indexed by property name (a string),
      and values are converted to Node objects. Note that properties of the
      /chosen node which can't be converted to a Node are not included in
      the value.

    dts_path:
      The .dts path passed to __init__()

    dts_source:
      The final DTS source code of the loaded devicetree after merging nodes
      and processing /delete-node/ and /delete-property/, as a string

    bindings_dirs:
      The bindings directory paths passed to __init__()

    scc_order:
      A list of lists of Nodes. All elements of each list
      depend on each other, and the Nodes in any list do not depend
      on any Node in a subsequent list. Each list defines a Strongly
      Connected Component (SCC) of the graph.

      For an acyclic graph each list will be a singleton. Cycles
      will be represented by lists with multiple nodes. Cycles are
      not expected to be present in devicetree graphs.

    The standard library's pickle module can be used to marshal and
    unmarshal EDT objects.
    """

    def __init__(self,
                 dts: Optional[str],
                 bindings_dirs: List[str],
                 warn_reg_unit_address_mismatch: bool = True,
                 default_prop_types: bool = True,
                 support_fixed_partitions_on_any_bus: bool = True,
                 infer_binding_for_paths: Optional[Iterable[str]] = None,
                 vendor_prefixes: Optional[Dict[str, str]] = None,
                 werror: bool = False):
        """EDT constructor.

        dts:
          Path to devicetree .dts file. Passing None for this value
          is only for internal use; do not do that outside of edtlib.

        bindings_dirs:
          List of paths to directories containing bindings, in YAML format.
          These directories are recursively searched for .yaml files.

        warn_reg_unit_address_mismatch (default: True):
          If True, a warning is logged if a node has a 'reg' property where
          the address of the first entry does not match the unit address of the
          node

        default_prop_types (default: True):
          If True, default property types will be used when a node has no
          bindings.

        support_fixed_partitions_on_any_bus (default True):
          If True, set the Node.bus for 'fixed-partitions' compatible nodes
          to None.  This allows 'fixed-partitions' binding to match regardless
          of the bus the 'fixed-partition' is under.

        infer_binding_for_paths (default: None):
          An iterable of devicetree paths identifying nodes for which bindings
          should be inferred from the node content.  (Child nodes are not
          processed.)  Pass None if no nodes should support inferred bindings.

        vendor_prefixes (default: None):
          A dict mapping vendor prefixes in compatible properties to their
          descriptions. If given, compatibles in the form "manufacturer,device"
          for which "manufacturer" is neither a key in the dict nor a specially
          exempt set of grandfathered-in cases will cause warnings.

        werror (default: False):
          If True, some edtlib specific warnings become errors. This currently
          errors out if 'dts' has any deprecated properties set, or an unknown
          vendor prefix is used.
        """
        # All instance attributes should be initialized here.
        # This makes it easy to keep track of them, which makes
        # implementing __deepcopy__() easier.
        # If you change this, make sure to update __deepcopy__() too,
        # and update the tests for that method.

        # Public attributes (the rest are properties)
        self.nodes: List[Node] = []
        self.compat2nodes: Dict[str, List[Node]] = defaultdict(list)
        self.compat2okay: Dict[str, List[Node]] = defaultdict(list)
        self.compat2notokay: Dict[str, List[Node]] = defaultdict(list)
        self.compat2vendor: Dict[str, str] = defaultdict(str)
        self.compat2model: Dict[str, str] = defaultdict(str)
        self.label2node: Dict[str, Node] = {}
        self.dep_ord2node: Dict[int, Node] = {}
        self.dts_path: str = dts # type: ignore
        self.bindings_dirs: List[str] = list(bindings_dirs)

        # Saved kwarg values for internal use
        self._warn_reg_unit_address_mismatch: bool = warn_reg_unit_address_mismatch
        self._default_prop_types: bool = default_prop_types
        self._fixed_partitions_no_bus: bool = support_fixed_partitions_on_any_bus
        self._infer_binding_for_paths: Set[str] = set(infer_binding_for_paths or [])
        self._vendor_prefixes: Dict[str, str] = vendor_prefixes or {}
        self._werror: bool = bool(werror)

        # Other internal state
        self._compat2binding: Dict[Tuple[str, Optional[str]], Binding] = {}
        self._graph: Graph = Graph()
        self._binding_paths: List[str] = _binding_paths(self.bindings_dirs)
        self._binding_fname2path: Dict[str, str] = {
            os.path.basename(path): path
            for path in self._binding_paths
        }
        self._node2enode: Dict[dtlib_Node, Node] = {}

        if dts is not None:
            try:
                self._dt = DT(dts)
            except DTError as e:
                raise EDTError(e) from e
            self._finish_init()

    def _finish_init(self) -> None:
        # This helper exists to make the __deepcopy__() implementation
        # easier to keep in sync with __init__().
        _check_dt(self._dt)

        self._init_compat2binding()
        self._init_nodes()
        self._init_graph()
        self._init_luts()

        self._check()

    def get_node(self, path: str) -> Node:
        """
        Returns the Node at the DT path or alias 'path'. Raises EDTError if the
        path or alias doesn't exist.
        """
        try:
            return self._node2enode[self._dt.get_node(path)]
        except DTError as e:
            _err(e)  # _err() raises EDTError, so this never returns

    @property
    def chosen_nodes(self) -> Dict[str, Node]:
        """See the class docstring"""
        ret: Dict[str, Node] = {}

        try:
            chosen = self._dt.get_node("/chosen")
        except DTError:
            return ret

        for name, prop in chosen.props.items():
            try:
                node = prop.to_path()
            except DTError:
                # DTS value is not phandle or string, or path doesn't exist
                continue

            ret[name] = self._node2enode[node]

        return ret

    def chosen_node(self, name: str) -> Optional[Node]:
        """
        Returns the Node pointed at by the property named 'name' in /chosen, or
        None if the property is missing
        """
        return self.chosen_nodes.get(name)

    @property
    def dts_source(self) -> str:
        """See the class docstring"""
        return f"{self._dt}"

    def __repr__(self) -> str:
        return (f"<EDT for '{self.dts_path}', binding directories "
                f"'{self.bindings_dirs}'>")

    def __deepcopy__(self, memo) -> 'EDT':
        """
        Implements support for the standard library copy.deepcopy()
        function on EDT instances.
        """

        ret = EDT(
            None,
            self.bindings_dirs,
            warn_reg_unit_address_mismatch=self._warn_reg_unit_address_mismatch,
            default_prop_types=self._default_prop_types,
            support_fixed_partitions_on_any_bus=self._fixed_partitions_no_bus,
            infer_binding_for_paths=set(self._infer_binding_for_paths),
            vendor_prefixes=dict(self._vendor_prefixes),
            werror=self._werror
        )
        ret.dts_path = self.dts_path
        ret._dt = deepcopy(self._dt, memo)
        ret._finish_init()
        return ret

    @property
    def scc_order(self) -> List[List[Node]]:
        """See the class docstring"""
        try:
            return self._graph.scc_order()
        except Exception as e:
            # Chain the cause so tracebacks show the underlying error,
            # consistent with how DTError is wrapped in __init__().
            raise EDTError(e) from e

    def _process_properties_r(self, root_node: Node, props_node: Node) -> None:
        """
        Process props_node properties for dependencies, and add those as
        dependencies of root_node. Then walk through all the props_node
        children and do the same recursively, maintaining the same root_node.

        This ensures that on a node with child nodes, the parent node includes
        the dependencies of all the child nodes as well as its own.
        """
        # A Node depends on any Nodes present in 'phandle',
        # 'phandles', or 'phandle-array' property values.
        for prop in props_node.props.values():
            if prop.type == 'phandle':
                self._graph.add_edge(root_node, prop.val)
            elif prop.type == 'phandles':
                if TYPE_CHECKING:
                    assert isinstance(prop.val, list)
                for phandle_node in prop.val:
                    self._graph.add_edge(root_node, phandle_node)
            elif prop.type == 'phandle-array':
                if TYPE_CHECKING:
                    assert isinstance(prop.val, list)
                for cd in prop.val:
                    if cd is None:
                        continue
                    if TYPE_CHECKING:
                        assert isinstance(cd, ControllerAndData)
                    self._graph.add_edge(root_node, cd.controller)

        # A Node depends on whatever supports the interrupts it
        # generates.
        for intr in props_node.interrupts:
            self._graph.add_edge(root_node, intr.controller)

        # If the binding defines child bindings, link the child properties to
        # the root_node as well.
        if props_node.has_child_binding:
            for child in props_node.children.values():
                if "compatible" in child.props:
                    # Not a child node, normal node on a different binding.
                    continue
                self._process_properties_r(root_node, child)

    def _process_properties(self, node: Node) -> None:
        """
        Add node dependencies based on own as well as child node properties,
        start from the node itself.
        """
        self._process_properties_r(node, node)

    def _init_graph(self) -> None:
        # Constructs a graph of dependencies between Node instances,
        # which is usable for computing a partial order over the dependencies.
        # The algorithm supports detecting dependency loops.
        #
        # Actually computing the SCC order is lazily deferred to the
        # first time the scc_order property is read.

        for node in self.nodes:
            # Always insert root node
            if not node.parent:
                self._graph.add_node(node)

            # A Node always depends on its parent.
            for child in node.children.values():
                self._graph.add_edge(child, node)

            self._process_properties(node)

    def _init_compat2binding(self) -> None:
        # Creates self._compat2binding, a dictionary that maps
        # (<compatible>, <bus>) tuples (both strings) to Binding objects.
        #
        # The Binding objects are created from YAML files discovered
        # in self.bindings_dirs as needed.
        #
        # For example, self._compat2binding["company,dev", "can"]
        # contains the Binding for the 'company,dev' device, when it
        # appears on the CAN bus.
        #
        # For bindings that don't specify a bus, <bus> is None, so that e.g.
        # self._compat2binding["company,notonbus", None] is the Binding.
        #
        # Only bindings for 'compatible' strings that appear in the devicetree
        # are loaded.

        dt_compats = _dt_compats(self._dt)
        # Searches for any 'compatible' string mentioned in the devicetree
        # files, with a regex
        dt_compats_search = re.compile(
            "|".join(re.escape(compat) for compat in dt_compats)
        ).search

        for binding_path in self._binding_paths:
            with open(binding_path, encoding="utf-8") as f:
                contents = f.read()

            # As an optimization, skip parsing files that don't contain any of
            # the .dts 'compatible' strings, which should be reasonably safe
            if not dt_compats_search(contents):
                continue

            # Load the binding and check that it actually matches one of the
            # compatibles. Might get false positives above due to comments and
            # stuff.

            try:
                # Parsed PyYAML output (Python lists/dictionaries/strings/etc.,
                # representing the file)
                raw = yaml.load(contents, Loader=_BindingLoader)
            except yaml.YAMLError as e:
                _err(f"'{binding_path}' appears in binding directories "
                     f"but isn't valid YAML: {e}")

            # Convert the raw data to a Binding object, erroring out
            # if necessary.
            binding = self._binding(raw, binding_path, dt_compats)

            # Register the binding in self._compat2binding, along with
            # any child bindings that have their own compatibles.
            while binding is not None:
                if binding.compatible:
                    self._register_binding(binding)
                binding = binding.child_binding

    def _binding(self,
                 raw: Optional[dict],
                 binding_path: str,
                 dt_compats: Set[str]) -> Optional[Binding]:
        # Convert a 'raw' binding from YAML to a Binding object and return it.
        #
        # Error out if the raw data looks like an invalid binding.
        #
        # Return None if the file doesn't contain a binding or the
        # binding's compatible isn't in dt_compats.

        # Get the 'compatible:' string.
        if raw is None or "compatible" not in raw:
            # Empty file, binding fragment, spurious file, etc.
            return None

        compatible = raw["compatible"]

        if compatible not in dt_compats:
            # Not a compatible we care about.
            return None

        # Initialize and return the Binding object.
        return Binding(binding_path, self._binding_fname2path, raw=raw)

    def _register_binding(self, binding: Binding) -> None:
        # Do not allow two different bindings to have the same
        # 'compatible:'/'on-bus:' combo
        if TYPE_CHECKING:
            assert binding.compatible
        old_binding = self._compat2binding.get((binding.compatible,
                                                binding.on_bus))
        if old_binding:
            msg = (f"both {old_binding.path} and {binding.path} have "
                   f"'compatible: {binding.compatible}'")
            if binding.on_bus is not None:
                msg += f" and 'on-bus: {binding.on_bus}'"
            _err(msg)

        # Register the binding.
        self._compat2binding[binding.compatible, binding.on_bus] = binding

    def _init_nodes(self) -> None:
        # Creates a list of edtlib.Node objects from the dtlib.Node objects, in
        # self.nodes

        for dt_node in self._dt.node_iter():
            # Warning: We depend on parent Nodes being created before their
            # children. This is guaranteed by node_iter().
            node = Node(dt_node, self, self._fixed_partitions_no_bus)
            self.nodes.append(node)
            self._node2enode[dt_node] = node

        for node in self.nodes:
            # Initialize properties that may depend on other Node objects having
            # been created, because they (either always or sometimes) reference
            # other nodes. Must be called separately after all nodes have been
            # created.
            node._init_crossrefs(
                default_prop_types=self._default_prop_types,
                err_on_deprecated=self._werror,
            )

        if self._warn_reg_unit_address_mismatch:
            # This warning matches the simple_bus_reg warning in dtc
            for node in self.nodes:
                # Address mismatch is ok for PCI devices
                if (node.regs and node.regs[0].addr != node.unit_addr and
                        not node.is_pci_device):
                    _LOG.warning("unit address and first address in 'reg' "
                                 f"(0x{node.regs[0].addr:x}) don't match for "
                                 f"{node.path}")

    def _init_luts(self) -> None:
        # Initialize node lookup tables (LUTs).

        for node in self.nodes:
            for label in node.labels:
                self.label2node[label] = node

            for compat in node.compats:
                if node.status == "okay":
                    self.compat2okay[compat].append(node)
                else:
                    self.compat2notokay[compat].append(node)

                if compat in self.compat2vendor:
                    continue

                # The regular expression comes from dt-schema.
                compat_re = r'^[a-zA-Z][a-zA-Z0-9,+\-._]+$'
                if not re.match(compat_re, compat):
                    _err(f"node '{node.path}' compatible '{compat}' "
                         'must match this regular expression: '
                         f"'{compat_re}'")

                if ',' in compat and self._vendor_prefixes:
                    vendor, model = compat.split(',', 1)
                    if vendor in self._vendor_prefixes:
                        self.compat2vendor[compat] = self._vendor_prefixes[vendor]
                        self.compat2model[compat] = model

                    # As an exception, the root node can have whatever
                    # compatibles it wants. Other nodes get checked.
                    elif node.path != '/':
                        if self._werror:
                            handler_fn: Any = _err
                        else:
                            handler_fn = _LOG.warning
                        handler_fn(
                            f"node '{node.path}' compatible '{compat}' "
                            f"has unknown vendor prefix '{vendor}'")

        for compat, nodes in self.compat2okay.items():
            self.compat2nodes[compat].extend(nodes)

        for compat, nodes in self.compat2notokay.items():
            self.compat2nodes[compat].extend(nodes)

        for nodeset in self.scc_order:
            node = nodeset[0]
            self.dep_ord2node[node.dep_ordinal] = node

    def _check(self) -> None:
        # Tree-wide checks and warnings.

        for binding in self._compat2binding.values():
            for spec in binding.prop2specs.values():
                if not spec.enum or spec.type != 'string':
                    continue

                if not spec.enum_tokenizable:
                    _LOG.warning(
                        f"compatible '{binding.compatible}' "
                        f"in binding '{binding.path}' has non-tokenizable enum "
                        f"for property '{spec.name}': " +
                        ', '.join(repr(x) for x in spec.enum))
                elif not spec.enum_upper_tokenizable:
                    _LOG.warning(
                        f"compatible '{binding.compatible}' "
                        f"in binding '{binding.path}' has enum for property "
                        f"'{spec.name}' that is only tokenizable "
                        'in lowercase: ' +
                        ', '.join(repr(x) for x in spec.enum))

        # Validate the contents of compatible properties.
        for node in self.nodes:
            if 'compatible' not in node.props:
                continue

            compatibles = node.props['compatible'].val

            # _check() runs after _init_compat2binding() has called
            # _dt_compats(), which already converted every compatible
            # property to a list of strings. So we know 'compatibles'
            # is a list, but add an assert for future-proofing.
            assert isinstance(compatibles, list)

            for compat in compatibles:
                # This is also just for future-proofing.
                assert isinstance(compat, str)
2388
2389
def bindings_from_paths(yaml_paths: List[str],
                        ignore_errors: bool = False) -> List[Binding]:
    """
    Get a list of Binding objects from the yaml files 'yaml_paths'.

    If 'ignore_errors' is True, YAML files that cause an EDTError when
    loaded are ignored. (No other exception types are silenced.)
    """

    fname2path = {os.path.basename(path): path for path in yaml_paths}

    bindings = []
    for path in yaml_paths:
        try:
            binding = Binding(path, fname2path)
        except EDTError:
            if not ignore_errors:
                raise
        else:
            bindings.append(binding)

    return bindings
2410
2411
class EDTError(Exception):
    """Exception raised for devicetree- and binding-related errors"""
2414
2415#
2416# Public global functions
2417#
2418
2419
def load_vendor_prefixes_txt(vendor_prefixes: str) -> Dict[str, str]:
    """Load a vendor-prefixes.txt file and return a dict
    representation mapping a vendor prefix to the vendor name.
    """
    result: Dict[str, str] = {}
    with open(vendor_prefixes, 'r', encoding='utf-8') as f:
        for raw_line in f:
            stripped = raw_line.strip()

            # Skip blank lines and comments.
            if not stripped or stripped.startswith('#'):
                continue

            # Remaining lines must look like:
            #
            # <vnd><TAB><vendor>
            vnd, tab, vendor = stripped.partition('\t')
            assert tab, stripped
            result[vnd] = vendor
    return result
2440
2441#
2442# Private global functions
2443#
2444
2445
def _dt_compats(dt: DT) -> Set[str]:
    # Returns a set() with all 'compatible' strings in the devicetree
    # represented by dt (a dtlib.DT instance)

    compats: Set[str] = set()
    for node in dt.node_iter():
        if "compatible" in node.props:
            compats.update(node.props["compatible"].to_strings())
    return compats
2454
2455
2456def _binding_paths(bindings_dirs: List[str]) -> List[str]:
2457    # Returns a list with the paths to all bindings (.yaml files) in
2458    # 'bindings_dirs'
2459
2460    binding_paths = []
2461
2462    for bindings_dir in bindings_dirs:
2463        for root, _, filenames in os.walk(bindings_dir):
2464            for filename in filenames:
2465                if filename.endswith(".yaml") or filename.endswith(".yml"):
2466                    binding_paths.append(os.path.join(root, filename))
2467
2468    return binding_paths
2469
2470
def _binding_inc_error(msg):
    # Helper for reporting errors in the !include implementation

    raise yaml.constructor.ConstructorError(None, None, f"error: {msg}")
2475
2476
2477def _check_include_dict(name: Optional[str],
2478                        allowlist: Optional[List[str]],
2479                        blocklist: Optional[List[str]],
2480                        child_filter: Optional[dict],
2481                        binding_path: Optional[str]) -> None:
2482    # Check that an 'include:' named 'name' with property-allowlist
2483    # 'allowlist', property-blocklist 'blocklist', and
2484    # child-binding filter 'child_filter' has valid structure.
2485
2486    if name is None:
2487        _err(f"'include:' element in {binding_path} "
2488             "should have a 'name' key")
2489
2490    if allowlist is not None and blocklist is not None:
2491        _err(f"'include:' of file '{name}' in {binding_path} "
2492             "should not specify both 'property-allowlist:' "
2493             "and 'property-blocklist:'")
2494
2495    while child_filter is not None:
2496        child_copy = deepcopy(child_filter)
2497        child_allowlist: Optional[List[str]] = (
2498            child_copy.pop('property-allowlist', None))
2499        child_blocklist: Optional[List[str]] = (
2500            child_copy.pop('property-blocklist', None))
2501        next_child_filter: Optional[dict] = (
2502            child_copy.pop('child-binding', None))
2503
2504        if child_copy:
2505            # We've popped out all the valid keys.
2506            _err(f"'include:' of file '{name}' in {binding_path} "
2507                 "should not have these unexpected contents in a "
2508                 f"'child-binding': {child_copy}")
2509
2510        if child_allowlist is not None and child_blocklist is not None:
2511            _err(f"'include:' of file '{name}' in {binding_path} "
2512                 "should not specify both 'property-allowlist:' and "
2513                 "'property-blocklist:' in a 'child-binding:'")
2514
2515        child_filter = next_child_filter
2516
2517
def _filter_properties(raw: dict,
                       allowlist: Optional[List[str]],
                       blocklist: Optional[List[str]],
                       child_filter: Optional[dict],
                       binding_path: Optional[str]) -> None:
    # Destructively modifies 'raw["properties"]' and
    # 'raw["child-binding"]', if they exist, according to
    # 'allowlist', 'blocklist', and 'child_filter'.

    # Top-level properties first.
    _filter_properties_helper(raw.get('properties'), allowlist, blocklist,
                              binding_path)

    # Then each nested 'child-binding' level, paired with the corresponding
    # level of 'child_filter'. Stop as soon as either chain runs out.
    child_binding = raw.get('child-binding')
    current_filter = child_filter
    while current_filter is not None and child_binding is not None:
        _filter_properties_helper(child_binding.get('properties'),
                                  current_filter.get('property-allowlist'),
                                  current_filter.get('property-blocklist'),
                                  binding_path)
        current_filter = current_filter.get('child-binding')
        child_binding = child_binding.get('child-binding')
2538
2539
2540def _filter_properties_helper(props: Optional[dict],
2541                              allowlist: Optional[List[str]],
2542                              blocklist: Optional[List[str]],
2543                              binding_path: Optional[str]) -> None:
2544    if props is None or (allowlist is None and blocklist is None):
2545        return
2546
2547    _check_prop_filter('property-allowlist', allowlist, binding_path)
2548    _check_prop_filter('property-blocklist', blocklist, binding_path)
2549
2550    if allowlist is not None:
2551        allowset = set(allowlist)
2552        to_del = [prop for prop in props if prop not in allowset]
2553    else:
2554        if TYPE_CHECKING:
2555            assert blocklist
2556        blockset = set(blocklist)
2557        to_del = [prop for prop in props if prop in blockset]
2558
2559    for prop in to_del:
2560        del props[prop]
2561
2562
2563def _check_prop_filter(name: str, value: Optional[List[str]],
2564                       binding_path: Optional[str]) -> None:
2565    # Ensure an include: ... property-allowlist or property-blocklist
2566    # is a list.
2567
2568    if value is None:
2569        return
2570
2571    if not isinstance(value, list):
2572        _err(f"'{name}' value {value} in {binding_path} should be a list")
2573
2574
def _merge_props(to_dict: dict,
                 from_dict: dict,
                 parent: Optional[str],
                 binding_path: Optional[str],
                 check_required: bool = False):
    # Recursively merges 'from_dict' into 'to_dict', to implement 'include:'.
    #
    # If 'from_dict' and 'to_dict' contain a 'required:' key for the same
    # property, then the values are ORed together.
    #
    # If 'check_required' is True, then an error is raised if 'from_dict' has
    # 'required: true' while 'to_dict' has 'required: false'. This prevents
    # bindings from "downgrading" requirements from bindings they include,
    # which might help keep bindings well-organized.
    #
    # It's an error for most other keys to appear in both 'from_dict' and
    # 'to_dict'. When it's not an error, the value in 'to_dict' takes
    # precedence.
    #
    # 'parent' is the name of the parent key containing 'to_dict' and
    # 'from_dict', and 'binding_path' is the path to the top-level binding.
    # These are used to generate errors for sketchy property overwrites.

    for prop in from_dict:
        if (isinstance(to_dict.get(prop), dict)
            and isinstance(from_dict[prop], dict)):
            # Both sides are dicts: merge them key by key, recursively
            _merge_props(to_dict[prop], from_dict[prop], prop, binding_path,
                         check_required)
        elif prop not in to_dict:
            # Key only exists in the included file: copy it over
            to_dict[prop] = from_dict[prop]
        elif _bad_overwrite(to_dict, from_dict, prop, check_required):
            _err(f"{binding_path} (in '{parent}'): '{prop}' "
                 f"from included file overwritten ('{from_dict[prop]}' "
                 f"replaced with '{to_dict[prop]}')")
        elif prop == "required":
            # Need a separate check here, because this code runs before
            # Binding._check()
            if not (isinstance(from_dict["required"], bool) and
                    isinstance(to_dict["required"], bool)):
                _err(f"malformed 'required:' setting for '{parent}' in "
                     f"'properties' in {binding_path}, expected true/false")

            # 'required: true' takes precedence
            to_dict["required"] = to_dict["required"] or from_dict["required"]
2619
2620
2621def _bad_overwrite(to_dict: dict, from_dict: dict, prop: str,
2622                   check_required: bool) -> bool:
2623    # _merge_props() helper. Returns True in cases where it's bad that
2624    # to_dict[prop] takes precedence over from_dict[prop].
2625
2626    if to_dict[prop] == from_dict[prop]:
2627        return False
2628
2629    # These are overridden deliberately
2630    if prop in {"title", "description", "compatible"}:
2631        return False
2632
2633    if prop == "required":
2634        if not check_required:
2635            return False
2636        return from_dict[prop] and not to_dict[prop]
2637
2638    return True
2639
2640
def _binding_include(loader, node):
    # Implements !include, for backwards compatibility. '!include [foo, bar]'
    # just becomes [foo, bar].

    if isinstance(node, yaml.SequenceNode):
        # List form: !include [foo.yaml, bar.yaml]
        return loader.construct_sequence(node)

    if isinstance(node, yaml.ScalarNode):
        # Single-file form: !include foo.yaml
        return [loader.construct_scalar(node)]

    _binding_inc_error("unrecognised node type in !include statement")
2654
2655
def _check_prop_by_type(prop_name: str,
                        options: dict,
                        binding_path: Optional[str]) -> None:
    # Binding._check_properties() helper. Checks 'type:', 'default:',
    # 'const:' and 'specifier-space:' for the property named 'prop_name',
    # whose binding entry is 'options'. Errors out via _err() on problems.

    prop_type = options.get("type")
    default = options.get("default")
    const = options.get("const")

    # Every property must declare a type
    if prop_type is None:
        _err(f"missing 'type:' for '{prop_name}' in 'properties' in "
             f"{binding_path}")

    ok_types = {"boolean", "int", "array", "uint8-array", "string",
                "string-array", "phandle", "phandles", "phandle-array",
                "path", "compound"}

    if prop_type not in ok_types:
        _err(f"'{prop_name}' in 'properties:' in {binding_path} "
             f"has unknown type '{prop_type}', expected one of " +
             ", ".join(ok_types))

    # 'specifier-space' only makes sense for phandle-array properties
    if "specifier-space" in options and prop_type != "phandle-array":
        _err(f"'specifier-space' in 'properties: {prop_name}' "
             f"has type '{prop_type}', expected 'phandle-array'")

    if prop_type == "phandle-array":
        # phandle-array property names conventionally end in 's'; otherwise
        # an explicit 'specifier-space' must be given
        if not prop_name.endswith("s") and not "specifier-space" in options:
            _err(f"'{prop_name}' in 'properties:' in {binding_path} "
                 f"has type 'phandle-array' and its name does not end in 's', "
                 f"but no 'specifier-space' was provided.")

    # If you change const_types, be sure to update the type annotation
    # for PropertySpec.const.
    # NOTE(review): a falsy 'const:' value (e.g. 0 or "") skips this check
    # due to the truthiness test below — confirm whether that is intended.
    const_types = {"int", "array", "uint8-array", "string", "string-array"}
    if const and prop_type not in const_types:
        _err(f"const in {binding_path} for property '{prop_name}' "
             f"has type '{prop_type}', expected one of " +
             ", ".join(const_types))

    # Check default

    if default is None:
        return

    # Defaults make no sense for these types
    if prop_type in {"boolean", "compound", "phandle", "phandles",
                     "phandle-array", "path"}:
        _err("'default:' can't be combined with "
             f"'type: {prop_type}' for '{prop_name}' in "
             f"'properties:' in {binding_path}")

    def ok_default() -> bool:
        # Returns True if 'default' is an okay default for the property's type.
        # If you change this, be sure to update the type annotation for
        # PropertySpec.default.

        if (prop_type == "int" and isinstance(default, int)
            or prop_type == "string" and isinstance(default, str)):
            return True

        # array, uint8-array, or string-array

        if not isinstance(default, list):
            return False

        if (prop_type == "array"
            and all(isinstance(val, int) for val in default)):
            return True

        if (prop_type == "uint8-array"
            and all(isinstance(val, int)
                    and 0 <= val <= 255 for val in default)):
            return True

        # string-array
        return all(isinstance(val, str) for val in default)

    if not ok_default():
        _err(f"'default: {default}' is invalid for '{prop_name}' "
             f"in 'properties:' in {binding_path}, "
             f"which has type {prop_type}")
2738
2739
def _translate(addr: int, node: dtlib_Node) -> int:
    # Recursively translates 'addr' on 'node' to the address space(s) of its
    # parent(s), by looking at 'ranges' properties. Returns the translated
    # address.
    #
    # addr:
    #   Address within 'node's address space, as an integer
    #
    # node:
    #   The dtlib.Node the address is relative to

    if not node.parent or "ranges" not in node.parent.props:
        # No translation
        return addr

    if not node.parent.props["ranges"].value:
        # DT spec.: "If the property is defined with an <empty> value, it
        # specifies that the parent and child address space is identical, and
        # no address translation is required."
        #
        # Treat this the same as a 'range' that explicitly does a one-to-one
        # mapping, as opposed to there not being any translation.
        return _translate(addr, node.parent)

    # Gives the size of each component in a translation 3-tuple in 'ranges'
    child_address_cells = _address_cells(node)
    parent_address_cells = _address_cells(node.parent)
    child_size_cells = _size_cells(node)

    # Number of cells for one translation 3-tuple in 'ranges'
    entry_cells = child_address_cells + parent_address_cells + child_size_cells

    for raw_range in _slice(node.parent, "ranges", 4*entry_cells,
                            f"4*(<#address-cells> (= {child_address_cells}) + "
                            "<#address-cells for parent> "
                            f"(= {parent_address_cells}) + "
                            f"<#size-cells> (= {child_size_cells}))"):
        # Each entry is <child addr> <parent addr> <length>. Peel the three
        # fields off the raw bytes in order, advancing the buffer as we go.
        child_addr = to_num(raw_range[:4*child_address_cells])
        raw_range = raw_range[4*child_address_cells:]

        parent_addr = to_num(raw_range[:4*parent_address_cells])
        raw_range = raw_range[4*parent_address_cells:]

        child_len = to_num(raw_range)

        if child_addr <= addr < child_addr + child_len:
            # 'addr' is within range of a translation in 'ranges'. Recursively
            # translate it and return the result.
            return _translate(parent_addr + addr - child_addr, node.parent)

    # 'addr' is not within range of any translation in 'ranges'
    return addr
2786
2787
def _add_names(node: dtlib_Node, names_ident: str, objs: Any) -> None:
    # Helper for registering names from <foo>-names properties.
    #
    # node:
    #   Node which has a property that might need named elements.
    #
    # names-ident:
    #   The <foo> part of <foo>-names, e.g. "reg" for "reg-names"
    #
    # objs:
    #   list of objects whose .name field should be set

    prop_name = f"{names_ident}-names"

    if prop_name not in node.props:
        # No names property: clear .name on every (non-None) object
        for obj in objs:
            if obj is not None:
                obj.name = None
        return

    names = node.props[prop_name].to_strings()
    if len(names) != len(objs):
        # There must be exactly one name per element
        _err(f"{prop_name} property in {node.path} "
             f"in {node.dt.filename} has {len(names)} strings, "
             f"expected {len(objs)} strings")

    for obj, name in zip(objs, names):
        if obj is None:
            continue
        obj.name = name
2817
2818
def _interrupt_parent(start_node: dtlib_Node) -> dtlib_Node:
    # Returns the node pointed at by the closest 'interrupt-parent', searching
    # the parents of 'node'. As of writing, this behavior isn't specified in
    # the DT spec., but seems to match what some .dts files expect.

    current: Optional[dtlib_Node] = start_node
    while current:
        # First node (starting from 'start_node' itself) with an
        # 'interrupt-parent' property wins
        if "interrupt-parent" in current.props:
            return current.props["interrupt-parent"].to_node()
        current = current.parent

    _err(f"{start_node!r} has an 'interrupts' property, but neither the node "
         f"nor any of its parents has an 'interrupt-parent' property")
2833
2834
def _interrupts(node: dtlib_Node) -> List[Tuple[dtlib_Node, bytes]]:
    # Returns a list of (<controller>, <data>) tuples, with one tuple per
    # interrupt generated by 'node'. <controller> is the destination of the
    # interrupt (possibly after mapping through an 'interrupt-map'), and <data>
    # the data associated with the interrupt (as a 'bytes' object).

    # 'interrupts-extended' takes precedence over 'interrupts' if both exist
    if "interrupts-extended" in node.props:
        prop = node.props["interrupts-extended"]

        mapped: List[Tuple[dtlib_Node, bytes]] = []
        for entry in _phandle_val_list(prop, "interrupt"):
            if entry is None:
                _err(f"node '{node.path}' interrupts-extended property "
                     "has an empty element")
            iparent, spec = entry
            mapped.append(_map_interrupt(node, iparent, spec))
        return mapped

    if "interrupts" not in node.props:
        # No interrupts generated by this node
        return []

    # Treat 'interrupts' as a special case of 'interrupts-extended', with
    # the same interrupt parent for all interrupts
    iparent = _interrupt_parent(node)
    icells = _interrupt_cells(iparent)

    return [_map_interrupt(node, iparent, raw)
            for raw in _slice(node, "interrupts", 4*icells,
                              "4*<#interrupt-cells>")]
2866
2867
def _map_interrupt(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> Tuple[dtlib_Node, bytes]:
    # Translates an interrupt headed from 'child' to 'parent' with data
    # 'child_spec' through any 'interrupt-map' properties. Returns a
    # (<controller>, <data>) tuple with the final destination after mapping.

    if "interrupt-controller" in parent.props:
        # Parent is itself the controller: no mapping needed
        return (parent, child_spec)

    def own_address_cells(node):
        # Used for parents pointed at by 'interrupt-map'. We can't use
        # _address_cells(), because it's the #address-cells property on 'node'
        # itself that matters.

        cells_prop = node.props.get("#address-cells")
        if not cells_prop:
            _err(f"missing #address-cells on {node!r} "
                 "(while handling interrupt-map)")
        return cells_prop.to_num()

    def spec_len_fn(node):
        # Can't use _address_cells() here, because it's the #address-cells
        # property on 'node' itself that matters
        return own_address_cells(node) + _interrupt_cells(node)

    # The child's unit address is prepended to the specifier when matching
    # interrupt-map entries
    controller, raw_spec = _map(
        "interrupt", child, parent, _raw_unit_addr(child) + child_spec,
        spec_len_fn, require_controller=True)

    # Strip the parent unit address part, if any
    return (controller, raw_spec[4*own_address_cells(controller):])
2902
2903
def _map_phandle_array_entry(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        basename: str
) -> Tuple[dtlib_Node, bytes]:
    # Returns a (<controller>, <data>) tuple with the final destination after
    # mapping through any '<basename>-map' (e.g. gpio-map) properties. See
    # _map_interrupt().

    def spec_len_fn(node):
        # The parent specifier length comes from the #<basename>-cells
        # property on the node pointed at by the map entry
        cells_name = f"#{basename}-cells"
        if cells_name not in node.props:
            _err(f"expected '{cells_name}' property on {node!r} "
                 f"(referenced by {child!r})")
        return node.props[cells_name].to_num()

    # Do not require <prefix>-controller for anything but interrupts for now
    return _map(basename, child, parent, child_spec, spec_len_fn,
                require_controller=False)
2924
2925
def _map(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        spec_len_fn: Callable[[dtlib_Node], int],
        require_controller: bool
) -> Tuple[dtlib_Node, bytes]:
    # Common code for mapping through <prefix>-map properties, e.g.
    # interrupt-map and gpio-map.
    #
    # prefix:
    #   The prefix, e.g. "interrupt" or "gpio"
    #
    # child:
    #   The "sender", e.g. the node with 'interrupts = <...>'
    #
    # parent:
    #   The "receiver", e.g. a node with 'interrupt-map = <...>' or
    #   'interrupt-controller' (no mapping)
    #
    # child_spec:
    #   The data associated with the interrupt/GPIO/etc., as a 'bytes' object,
    #   e.g. <1 2> for 'foo-gpios = <&gpio1 1 2>'.
    #
    # spec_len_fn:
    #   Function called on a parent specified in a *-map property to get the
    #   length of the parent specifier (data after phandle in *-map), in cells
    #
    # require_controller:
    #   If True, the final controller node after mapping is required to have
    #   to have a <prefix>-controller property.

    map_prop = parent.props.get(prefix + "-map")
    if not map_prop:
        if require_controller and prefix + "-controller" not in parent.props:
            _err(f"expected '{prefix}-controller' property on {parent!r} "
                 f"(referenced by {child!r})")

        # No mapping
        return (parent, child_spec)

    # Child specifier with the (optional) <prefix>-map-mask applied, used
    # when matching against each map entry below
    masked_child_spec = _mask(prefix, child, parent, child_spec)

    # 'raw' acts as a byte cursor over the map property value; each loop
    # iteration consumes one <child data> <phandle> <parent data> row
    raw = map_prop.value
    while raw:
        if len(raw) < len(child_spec):
            _err(f"bad value for {map_prop!r}, missing/truncated child data")
        child_spec_entry = raw[:len(child_spec)]
        raw = raw[len(child_spec):]

        if len(raw) < 4:
            _err(f"bad value for {map_prop!r}, missing/truncated phandle")
        phandle = to_num(raw[:4])
        raw = raw[4:]

        # Parent specified in *-map
        map_parent = parent.dt.phandle2node.get(phandle)
        if not map_parent:
            _err(f"bad phandle ({phandle}) in {map_prop!r}")

        # The parent data length depends on the map parent (e.g. its
        # #<prefix>-cells), so it must be queried per row
        map_parent_spec_len = 4*spec_len_fn(map_parent)
        if len(raw) < map_parent_spec_len:
            _err(f"bad value for {map_prop!r}, missing/truncated parent data")
        parent_spec = raw[:map_parent_spec_len]
        raw = raw[map_parent_spec_len:]

        # Got one *-map row. Check if it matches the child data.
        if child_spec_entry == masked_child_spec:
            # Handle *-map-pass-thru
            parent_spec = _pass_thru(
                prefix, child, parent, child_spec, parent_spec)

            # Found match. Recursively map and return it.
            return _map(prefix, parent, map_parent, parent_spec, spec_len_fn,
                        require_controller)

    _err(f"child specifier for {child!r} ({child_spec!r}) "
         f"does not appear in {map_prop!r}")
3005
3006
def _mask(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-mask properties, e.g.
    # interrupt-map-mask. Returns 'child_spec' ANDed with the mask from
    # 'parent', or unchanged if 'parent' has no mask property.
    # See _map() for the parameters.

    mask_prop = parent.props.get(prefix + "-map-mask")
    if not mask_prop:
        # No mask
        return child_spec

    mask = mask_prop.value
    if len(mask) != len(child_spec):
        # Bug fix: report the property actually consulted
        # ('<prefix>-map-mask'); the message previously said
        # '<prefix>-mask', which is not a property this code reads.
        _err(f"{child!r}: expected '{prefix}-map-mask' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(mask)} bytes")

    return _and(child_spec, mask)
3027
3028
def _pass_thru(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        parent_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-pass-thru properties, e.g.
    # interrupt-map-pass-thru.
    #
    # parent_spec:
    #   The parent data from the matched entry in the <prefix>-map property
    #
    # See _map() for the other parameters.

    pass_thru_prop = parent.props.get(prefix + "-map-pass-thru")
    if not pass_thru_prop:
        # No pass-thru
        return parent_spec

    pass_thru = pass_thru_prop.value
    if len(pass_thru) != len(child_spec):
        _err(f"{child!r}: expected '{prefix}-map-pass-thru' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(pass_thru)} bytes")

    # Bits set in the pass-thru mask are taken from the child specifier;
    # all other bits come from the matched map entry's parent data
    combined = _or(_and(child_spec, pass_thru),
                   _and(parent_spec, _not(pass_thru)))

    # Truncate to length of parent spec.
    return combined[-len(parent_spec):]
3059
3060
def _raw_unit_addr(node: dtlib_Node) -> bytes:
    # _map_interrupt() helper. Returns the unit address (derived from 'reg' and
    # #address-cells) as a raw 'bytes'

    reg_prop = node.props.get('reg')
    if reg_prop is None:
        _err(f"{node!r} lacks 'reg' property "
             "(needed for 'interrupt-map' unit address lookup)")

    # The unit address is the first #address-cells cells of 'reg'
    addr_len = 4*_address_cells(node)

    if len(reg_prop.value) < addr_len:
        _err(f"{node!r} has too short 'reg' property "
             "(while doing 'interrupt-map' unit address lookup)")

    return reg_prop.value[:addr_len]
3076
3077
3078def _and(b1: bytes, b2: bytes) -> bytes:
3079    # Returns the bitwise AND of the two 'bytes' objects b1 and b2. Pads
3080    # with ones on the left if the lengths are not equal.
3081
3082    # Pad on the left, to equal length
3083    maxlen = max(len(b1), len(b2))
3084    return bytes(x & y for x, y in zip(b1.rjust(maxlen, b'\xff'),
3085                                       b2.rjust(maxlen, b'\xff')))
3086
3087
3088def _or(b1: bytes, b2: bytes) -> bytes:
3089    # Returns the bitwise OR of the two 'bytes' objects b1 and b2. Pads with
3090    # zeros on the left if the lengths are not equal.
3091
3092    # Pad on the left, to equal length
3093    maxlen = max(len(b1), len(b2))
3094    return bytes(x | y for x, y in zip(b1.rjust(maxlen, b'\x00'),
3095                                       b2.rjust(maxlen, b'\x00')))
3096
3097
3098def _not(b: bytes) -> bytes:
3099    # Returns the bitwise not of the 'bytes' object 'b'
3100
3101    # ANDing with 0xFF avoids negative numbers
3102    return bytes(~x & 0xFF for x in b)
3103
3104
def _phandle_val_list(
        prop: dtlib_Property,
        n_cells_name: str
) -> List[Optional[Tuple[dtlib_Node, bytes]]]:
    # Parses a '<phandle> <value> <phandle> <value> ...' value. The number of
    # cells that make up each <value> is derived from the node pointed at by
    # the preceding <phandle>.
    #
    # prop:
    #   dtlib.Property with value to parse
    #
    # n_cells_name:
    #   The <name> part of the #<name>-cells property to look for on the nodes
    #   the phandles point to, e.g. "gpio" for #gpio-cells.
    #
    # Each tuple in the return value is a (<node>, <value>) pair, where <node>
    # is the node pointed at by <phandle>. If <phandle> does not refer
    # to a node, the entire list element is None.

    full_n_cells_name = f"#{n_cells_name}-cells"

    res: List[Optional[Tuple[dtlib_Node, bytes]]] = []

    # 'raw' acts as a byte cursor over the property value; each iteration
    # consumes one 4-byte phandle plus its variable-length value
    raw = prop.value
    while raw:
        if len(raw) < 4:
            # Not enough room for phandle
            _err("bad value for " + repr(prop))
        phandle = to_num(raw[:4])
        raw = raw[4:]

        node = prop.node.dt.phandle2node.get(phandle)
        if not node:
            # Unspecified phandle-array element. This is valid; a 0
            # phandle value followed by no cells is an empty element.
            res.append(None)
            continue

        if full_n_cells_name not in node.props:
            _err(f"{node!r} lacks {full_n_cells_name}")

        # The target node's #<name>-cells gives the value length (in cells)
        n_cells = node.props[full_n_cells_name].to_num()
        if len(raw) < 4*n_cells:
            _err("missing data after phandle in " + repr(prop))

        res.append((node, raw[:4*n_cells]))
        raw = raw[4*n_cells:]

    return res
3154
3155
def _address_cells(node: dtlib_Node) -> int:
    # Returns the #address-cells setting for 'node', giving the number of <u32>
    # cells used to encode the address in the 'reg' property
    if TYPE_CHECKING:
        assert node.parent

    # #address-cells is set on the parent, since it describes the children
    cells_prop = node.parent.props.get("#address-cells")
    if cells_prop is None:
        return 2  # Default value per DT spec.
    return cells_prop.to_num()
3165
3166
def _size_cells(node: dtlib_Node) -> int:
    # Returns the #size-cells setting for 'node', giving the number of <u32>
    # cells used to encode the size in the 'reg' property
    if TYPE_CHECKING:
        assert node.parent

    # #size-cells is set on the parent, since it describes the children
    cells_prop = node.parent.props.get("#size-cells")
    if cells_prop is None:
        return 1  # Default value per DT spec.
    return cells_prop.to_num()
3176
3177
def _interrupt_cells(node: dtlib_Node) -> int:
    # Returns the #interrupt-cells property value on 'node', erroring out if
    # 'node' has no #interrupt-cells property

    cells_prop = node.props.get("#interrupt-cells")
    if cells_prop is None:
        _err(f"{node!r} lacks #interrupt-cells")
    return cells_prop.to_num()
3185
3186
def _slice(node: dtlib_Node,
           prop_name: str,
           size: int,
           size_hint: str) -> List[bytes]:
    # Wrapper around _slice_helper() that makes it report problems as
    # EDTError (passed as the error class) instead of a dtlib error.
    # 'size_hint' is a human-readable description of 'size', for error
    # messages.
    return _slice_helper(node, prop_name, size, size_hint, EDTError)
3192
3193
3194def _check_dt(dt: DT) -> None:
3195    # Does devicetree sanity checks. dtlib is meant to be general and
3196    # anything-goes except for very special properties like phandle, but in
3197    # edtlib we can be pickier.
3198
3199    # Check that 'status' has one of the values given in the devicetree spec.
3200
3201    # Accept "ok" for backwards compatibility
3202    ok_status = {"ok", "okay", "disabled", "reserved", "fail", "fail-sss"}
3203
3204    for node in dt.node_iter():
3205        if "status" in node.props:
3206            try:
3207                status_val = node.props["status"].to_string()
3208            except DTError as e:
3209                # The error message gives the path
3210                _err(str(e))
3211
3212            if status_val not in ok_status:
3213                _err(f"unknown 'status' value \"{status_val}\" in {node.path} "
3214                     f"in {node.dt.filename}, expected one of " +
3215                     ", ".join(ok_status) +
3216                     " (see the devicetree specification)")
3217
3218        ranges_prop = node.props.get("ranges")
3219        if ranges_prop:
3220            if ranges_prop.type not in (Type.EMPTY, Type.NUMS):
3221                _err(f"expected 'ranges = < ... >;' in {node.path} in "
3222                     f"{node.dt.filename}, not '{ranges_prop}' "
3223                     "(see the devicetree specification)")
3224
3225
def _err(msg) -> NoReturn:
    # Helper for raising an EDTError with message 'msg'. Never returns.
    exc = EDTError(msg)
    raise exc
3228
# Module-level logger, named after this module (see the logging docs)
_LOG = logging.getLogger(__name__)
3231
# Matches a single character that is not an ASCII letter, digit, or
# underscore (re.ASCII keeps \W from matching non-ASCII "word" characters)
_NOT_ALPHANUM_OR_UNDERSCORE = re.compile(r'\W', re.ASCII)


def str_as_token(val: str) -> str:
    """Return a canonical representation of a string as a C token.

    Every character in 'val' that is not an ASCII letter, digit, or
    underscore is replaced with an underscore."""

    return _NOT_ALPHANUM_OR_UNDERSCORE.sub('_', val)
3243
3244
class _BindingLoader(Loader):
    """Custom PyYAML loader class for bindings.

    Used instead of modifying yaml.Loader directly, which could
    interfere with YAML loading in clients of this library.
    """
3249
3250
# Add legacy '!include foo.yaml' handling, dispatching the '!include'
# YAML tag to _binding_include (defined earlier in this file)
_BindingLoader.add_constructor("!include", _binding_include)
3253
3254#
3255# "Default" binding for properties which are defined by the spec.
3256#
3257# Zephyr: do not change the _DEFAULT_PROP_TYPES keys without
3258# updating the documentation for the DT_PROP() macro in
3259# include/devicetree.h.
3260#
3261
3262_DEFAULT_PROP_TYPES: Dict[str, str] = {
3263    "compatible": "string-array",
3264    "status": "string",
3265    "ranges": "compound",  # NUMS or EMPTY
3266    "reg": "array",
3267    "reg-names": "string-array",
3268    "label": "string",
3269    "interrupts": "array",
3270    "interrupts-extended": "compound",
3271    "interrupt-names": "string-array",
3272    "interrupt-controller": "boolean",
3273}
3274
3275_STATUS_ENUM: List[str] = "ok okay disabled reserved fail fail-sss".split()
3276
3277def _raw_default_property_for(
3278        name: str
3279) -> Dict[str, Union[str, bool, List[str]]]:
3280    ret: Dict[str, Union[str, bool, List[str]]] = {
3281        'type': _DEFAULT_PROP_TYPES[name],
3282        'required': False,
3283    }
3284    if name == 'status':
3285        ret['enum'] = _STATUS_ENUM
3286    return ret
3287
# Binding backing the default property specs below: no binding file
# (path None), no file-to-raw-data map, and one 'properties' entry per
# key in _DEFAULT_PROP_TYPES. 'compatible' and 'description' are not
# required since this binding is synthesized, not loaded from YAML.
_DEFAULT_PROP_BINDING: Binding = Binding(
    None, {},
    raw={
        'properties': {
            name: _raw_default_property_for(name)
            for name in _DEFAULT_PROP_TYPES
        },
    },
    require_compatible=False, require_description=False,
)

# PropertySpec instances for the spec-defined properties, keyed by
# property name and all backed by _DEFAULT_PROP_BINDING above
_DEFAULT_PROP_SPECS: Dict[str, PropertySpec] = {
    name: PropertySpec(name, _DEFAULT_PROP_BINDING)
    for name in _DEFAULT_PROP_TYPES
}
3303