1# Copyright (c) 2019 Nordic Semiconductor ASA
2# Copyright (c) 2019 Linaro Limited
3# SPDX-License-Identifier: BSD-3-Clause
4
5# Tip: You can view just the documentation with 'pydoc3 devicetree.edtlib'
6
7"""
8Library for working with devicetrees at a higher level compared to dtlib. Like
9dtlib, this library presents a tree of devicetree nodes, but the nodes are
10augmented with information from bindings and include some interpretation of
11properties. Some of this interpretation is based on conventions established
12by the Linux kernel, so the Documentation/devicetree/bindings in the Linux
13source code is sometimes good reference material.
14
15Bindings are YAML files that describe devicetree nodes. Devicetree
16nodes are usually mapped to bindings via their 'compatible = "..."' property,
17but a binding can also come from a 'child-binding:' key in the binding for the
18parent devicetree node.
19
20Each devicetree node (dtlib.Node) gets a corresponding edtlib.Node instance,
21which has all the information related to the node.
22
23The top-level entry points for the library are the EDT and Binding classes.
24See their constructor docstrings for details. There is also a
25bindings_from_paths() helper function.
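
As a rough usage sketch (the devicetree and bindings paths below are made up
for illustration; any real paths will do):

    from devicetree import edtlib

    edt = edtlib.EDT("board.dts", ["dts/bindings"])
    for node in edt.nodes:
        print(node.path, node.matching_compat)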
26"""
27
28# NOTE: tests/test_edtlib.py is the test suite for this library.
29
30# Implementation notes
31# --------------------
32#
33# A '_' prefix on an identifier in Python is a convention for marking it private.
34# Please do not access private things. Instead, think of what API you need, and
35# add it.
36#
37# This module is not meant to have any global state. It should be possible to
38# create several EDT objects with independent binding paths and flags. If you
39# need to add a configuration parameter or the like, store it in the EDT
40# instance, and initialize it e.g. with a constructor argument.
41#
42# This library is layered on top of dtlib, and is not meant to expose it to
43# clients. This keeps the header generation script simple.
44#
45# General biased advice:
46#
47# - Consider using @property for APIs that don't need parameters. It makes
48#   functions look like attributes, which is less awkward in clients, and makes
49#   it easy to switch back and forth between variables and functions.
50#
51# - Think about the data type of the thing you're exposing. Exposing something
52#   as e.g. a list or a dictionary is often nicer and more flexible than adding
53#   a function.
54#
55# - Avoid get_*() prefixes on functions. Name them after the thing they return
56#   instead. This often makes the code read more naturally in callers.
57#
58#   Also, consider using @property instead of get_*().
59#
60# - Don't expose dtlib stuff directly.
61#
62# - Add documentation for any new APIs you add.
63#
64#   The convention here is that docstrings (quoted strings) are used for public
65#   APIs, and "doc comments" for internal functions.
66#
67#   @properties are documented in the class docstring, as if they were
68#   variables. See the existing @properties for a template.
69
70from collections import defaultdict
71from copy import deepcopy
72from dataclasses import dataclass
73from typing import Any, Callable, Dict, Iterable, List, NoReturn, \
74    Optional, Set, TYPE_CHECKING, Tuple, Union
75import logging
76import os
77import re
78
79import yaml
80try:
81    # Use the C LibYAML parser if available, rather than the Python parser.
82    # This makes e.g. gen_defines.py more than twice as fast.
83    from yaml import CLoader as Loader
84except ImportError:
85    from yaml import Loader     # type: ignore
86
87from devicetree.dtlib import DT, DTError, to_num, to_nums, Type
88from devicetree.dtlib import Node as dtlib_Node
89from devicetree.dtlib import Property as dtlib_Property
90from devicetree.grutils import Graph
91from devicetree._private import _slice_helper
92
93#
94# Public classes
95#
96
97
98class Binding:
99    """
100    Represents a parsed binding.
101
102    These attributes are available on Binding objects:
103
104    path:
105      The absolute path to the file defining the binding.
106
107    description:
108      The free-form description of the binding, or None.
109
110    compatible:
111      The compatible string the binding matches.
112
113      This may be None. For example, it's None when the Binding is inferred
114      from node properties. It can also be None for Binding objects created
115      using 'child-binding:' with no compatible.
116
117    prop2specs:
118      A dict mapping property names to PropertySpec objects
119      describing those properties' values.
120
121    specifier2cells:
122      A dict that maps specifier space names (like "gpio",
123      "clock", "pwm", etc.) to lists of cell names.
124
125      For example, if the binding YAML contains 'pin' and 'flags' cell names
126      for the 'gpio' specifier space, like this:
127
128          gpio-cells:
129          - pin
130          - flags
131
132      Then the Binding object will have a 'specifier2cells' attribute mapping
133      "gpio" to ["pin", "flags"]. A missing key should be interpreted as zero
134      cells.
135
136    raw:
137      The binding as an object parsed from YAML.
138
139    bus:
140      If nodes with this binding's 'compatible' describe a bus, a string
141      describing the bus type (like "i2c") or a list describing supported
142      protocols (like ["i3c", "i2c"]). None otherwise.
143
      Note that this is the raw value from the binding, which can be either
      a string or a list. Prefer the "buses" attribute, which is always a
      list, unless you need the raw value.
147
148    buses:
      Derived from 'bus': a list of the supported bus(es), for example
      ["i2c"] or ["i3c", "i2c"], or an empty list if there is no 'bus:'
      in this binding.
152
153    on_bus:
154      If nodes with this binding's 'compatible' appear on a bus, a string
155      describing the bus type (like "i2c"). None otherwise.
156
157    child_binding:
158      If this binding describes the properties of child nodes, then
159      this is a Binding object for those children; it is None otherwise.
160      A Binding object's 'child_binding.child_binding' is not None if there
161      are multiple levels of 'child-binding' descriptions in the binding.
162    """
163
164    def __init__(self, path: Optional[str], fname2path: Dict[str, str],
165                 raw: Any = None, require_compatible: bool = True,
166                 require_description: bool = True):
167        """
168        Binding constructor.
169
170        path:
171          Path to binding YAML file. May be None.
172
173        fname2path:
174          Map from include files to their absolute paths. Must
175          not be None, but may be empty.
176
177        raw:
178          Optional raw content in the binding.
179          This does not have to have any "include:" lines resolved.
180          May be left out, in which case 'path' is opened and read.
181          This can be used to resolve child bindings, for example.
182
183        require_compatible:
184          If True, it is an error if the binding does not contain a
185          "compatible:" line. If False, a missing "compatible:" is
186          not an error. Either way, "compatible:" must be a string
187          if it is present in the binding.
188
189        require_description:
190          If True, it is an error if the binding does not contain a
191          "description:" line. If False, a missing "description:" is
192          not an error. Either way, "description:" must be a string
193          if it is present in the binding.
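
        As a sketch of typical use (the YAML path is hypothetical, and the
        file is assumed not to have any 'include:' lines, since 'fname2path'
        is empty here):

            binding = Binding("dts/bindings/vnd,device.yaml", fname2path={})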
194        """
195        self.path: Optional[str] = path
196        self._fname2path: Dict[str, str] = fname2path
197
198        if raw is None:
199            if path is None:
200                _err("you must provide either a 'path' or a 'raw' argument")
201            with open(path, encoding="utf-8") as f:
202                raw = yaml.load(f, Loader=_BindingLoader)
203
204        # Merge any included files into self.raw. This also pulls in
205        # inherited child binding definitions, so it has to be done
206        # before initializing those.
207        self.raw: dict = self._merge_includes(raw, self.path)
208
209        # Recursively initialize any child bindings. These don't
210        # require a 'compatible' or 'description' to be well defined,
211        # but they must be dicts.
212        if "child-binding" in raw:
213            if not isinstance(raw["child-binding"], dict):
214                _err(f"malformed 'child-binding:' in {self.path}, "
215                     "expected a binding (dictionary with keys/values)")
216            self.child_binding: Optional['Binding'] = Binding(
217                path, fname2path,
218                raw=raw["child-binding"],
219                require_compatible=False,
220                require_description=False)
221        else:
222            self.child_binding = None
223
224        # Make sure this is a well defined object.
225        self._check(require_compatible, require_description)
226
227        # Initialize look up tables.
228        self.prop2specs: Dict[str, 'PropertySpec'] = {}
229        for prop_name in self.raw.get("properties", {}).keys():
230            self.prop2specs[prop_name] = PropertySpec(prop_name, self)
231        self.specifier2cells: Dict[str, List[str]] = {}
232        for key, val in self.raw.items():
233            if key.endswith("-cells"):
234                self.specifier2cells[key[:-len("-cells")]] = val
235
236    def __repr__(self) -> str:
237        if self.compatible:
238            compat = f" for compatible '{self.compatible}'"
239        else:
240            compat = ""
241        basename = os.path.basename(self.path or "")
        return f"<Binding {basename}{compat}>"
243
244    @property
245    def description(self) -> Optional[str]:
246        "See the class docstring"
247        return self.raw.get('description')
248
249    @property
250    def compatible(self) -> Optional[str]:
251        "See the class docstring"
252        return self.raw.get('compatible')
253
254    @property
255    def bus(self) -> Union[None, str, List[str]]:
256        "See the class docstring"
257        return self.raw.get('bus')
258
259    @property
260    def buses(self) -> List[str]:
261        "See the class docstring"
262        if self.raw.get('bus') is not None:
263            return self._buses
264        else:
265            return []
266
267    @property
268    def on_bus(self) -> Optional[str]:
269        "See the class docstring"
270        return self.raw.get('on-bus')
271
272    def _merge_includes(self, raw: dict, binding_path: Optional[str]) -> dict:
273        # Constructor helper. Merges included files in
274        # 'raw["include"]' into 'raw' using 'self._include_paths' as a
275        # source of include files, removing the "include" key while
276        # doing so.
277        #
278        # This treats 'binding_path' as the binding file being built up
279        # and uses it for error messages.
280
281        if "include" not in raw:
282            return raw
283
284        include = raw.pop("include")
285
286        # First, merge the included files together. If more than one included
287        # file has a 'required:' for a particular property, OR the values
288        # together, so that 'required: true' wins.
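        #
        # For example (with illustrative file names): if a.yaml has
        # 'required: false' and b.yaml has 'required: true' for the same
        # property, the merged include data ends up with 'required: true'.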
289
290        merged: Dict[str, Any] = {}
291
292        if isinstance(include, str):
293            # Simple scalar string case
294            _merge_props(merged, self._load_raw(include), None, binding_path,
295                         False)
296        elif isinstance(include, list):
297            # List of strings and maps. These types may be intermixed.
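            #
            # For example, an 'include:' list may mix both forms like this
            # (file and property names are illustrative):
            #
            #     include:
            #       - base.yaml
            #       - name: other.yaml
            #         property-allowlist:
            #           - vnd,keep-this-prop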
298            for elem in include:
299                if isinstance(elem, str):
300                    _merge_props(merged, self._load_raw(elem), None,
301                                 binding_path, False)
302                elif isinstance(elem, dict):
303                    name = elem.pop('name', None)
304                    allowlist = elem.pop('property-allowlist', None)
305                    blocklist = elem.pop('property-blocklist', None)
306                    child_filter = elem.pop('child-binding', None)
307
308                    if elem:
309                        # We've popped out all the valid keys.
310                        _err(f"'include:' in {binding_path} should not have "
311                             f"these unexpected contents: {elem}")
312
313                    _check_include_dict(name, allowlist, blocklist,
314                                        child_filter, binding_path)
315
316                    contents = self._load_raw(name)
317
318                    _filter_properties(contents, allowlist, blocklist,
319                                       child_filter, binding_path)
320                    _merge_props(merged, contents, None, binding_path, False)
321                else:
322                    _err(f"all elements in 'include:' in {binding_path} "
323                         "should be either strings or maps with a 'name' key "
324                         "and optional 'property-allowlist' or "
325                         f"'property-blocklist' keys, but got: {elem}")
326        else:
327            # Invalid item.
328            _err(f"'include:' in {binding_path} "
329                 f"should be a string or list, but has type {type(include)}")
330
331        # Next, merge the merged included files into 'raw'. Error out if
332        # 'raw' has 'required: false' while the merged included files have
333        # 'required: true'.
334
335        _merge_props(raw, merged, None, binding_path, check_required=True)
336
337        return raw
338
339    def _load_raw(self, fname: str) -> dict:
340        # Returns the contents of the binding given by 'fname' after merging
341        # any bindings it lists in 'include:' into it. 'fname' is just the
342        # basename of the file, so we check that there aren't multiple
343        # candidates.
344
345        path = self._fname2path.get(fname)
346
347        if not path:
348            _err(f"'{fname}' not found")
349
350        with open(path, encoding="utf-8") as f:
351            contents = yaml.load(f, Loader=_BindingLoader)
352            if not isinstance(contents, dict):
353                _err(f'{path}: invalid contents, expected a mapping')
354
355        return self._merge_includes(contents, path)
356
357    def _check(self, require_compatible: bool, require_description: bool):
358        # Does sanity checking on the binding.
359
360        raw = self.raw
361
362        if "compatible" in raw:
363            compatible = raw["compatible"]
364            if not isinstance(compatible, str):
365                _err(f"malformed 'compatible: {compatible}' "
366                     f"field in {self.path} - "
367                     f"should be a string, not {type(compatible).__name__}")
368        elif require_compatible:
369            _err(f"missing 'compatible' in {self.path}")
370
371        if "description" in raw:
372            description = raw["description"]
373            if not isinstance(description, str) or not description:
374                _err(f"malformed or empty 'description' in {self.path}")
375        elif require_description:
376            _err(f"missing 'description' in {self.path}")
377
378        # Allowed top-level keys. The 'include' key should have been
379        # removed by _load_raw() already.
380        ok_top = {"description", "compatible", "bus", "on-bus",
381                  "properties", "child-binding"}
382
383        # Descriptive errors for legacy bindings.
384        legacy_errors = {
385            "#cells": "expected *-cells syntax",
386            "child": "use 'bus: <bus>' instead",
387            "child-bus": "use 'bus: <bus>' instead",
388            "parent": "use 'on-bus: <bus>' instead",
389            "parent-bus": "use 'on-bus: <bus>' instead",
390            "sub-node": "use 'child-binding' instead",
391            "title": "use 'description' instead",
392        }
393
394        for key in raw:
395            if key in legacy_errors:
396                _err(f"legacy '{key}:' in {self.path}, {legacy_errors[key]}")
397
398            if key not in ok_top and not key.endswith("-cells"):
399                _err(f"unknown key '{key}' in {self.path}, "
                     f"expected one of {', '.join(ok_top)}, or *-cells")
401
402        if "bus" in raw:
403            bus = raw["bus"]
            if not isinstance(bus, str) and \
               (not isinstance(bus, list) or \
                not all(isinstance(elem, str) for elem in bus)):
407                _err(f"malformed 'bus:' value in {self.path}, "
408                     "expected string or list of strings")
409
410            if isinstance(bus, list):
411                self._buses = bus
412            else:
413                # Convert bus into a list
414                self._buses = [bus]
415
416        if "on-bus" in raw and \
417           not isinstance(raw["on-bus"], str):
418            _err(f"malformed 'on-bus:' value in {self.path}, "
419                 "expected string")
420
421        self._check_properties()
422
423        for key, val in raw.items():
424            if key.endswith("-cells"):
425                if not isinstance(val, list) or \
426                   not all(isinstance(elem, str) for elem in val):
427                    _err(f"malformed '{key}:' in {self.path}, "
428                         "expected a list of strings")
429
430    def _check_properties(self) -> None:
431        # _check() helper for checking the contents of 'properties:'.
432
433        raw = self.raw
434
435        if "properties" not in raw:
436            return
437
438        ok_prop_keys = {"description", "type", "required",
439                        "enum", "const", "default", "deprecated",
440                        "specifier-space"}
441
442        for prop_name, options in raw["properties"].items():
443            for key in options:
444                if key not in ok_prop_keys:
445                    _err(f"unknown setting '{key}' in "
446                         f"'properties: {prop_name}: ...' in {self.path}, "
447                         f"expected one of {', '.join(ok_prop_keys)}")
448
449            _check_prop_by_type(prop_name, options, self.path)
450
451            for true_false_opt in ["required", "deprecated"]:
452                if true_false_opt in options:
453                    option = options[true_false_opt]
454                    if not isinstance(option, bool):
455                        _err(f"malformed '{true_false_opt}:' setting '{option}' "
456                             f"for '{prop_name}' in 'properties' in {self.path}, "
457                             "expected true/false")
458
459            if options.get("deprecated") and options.get("required"):
460                _err(f"'{prop_name}' in 'properties' in {self.path} should not "
461                      "have both 'deprecated' and 'required' set")
462
            if "description" in options and \
               (not isinstance(options["description"], str) or
                not options["description"]):
                _err("malformed or empty 'description' for "
                     f"'{prop_name}' in 'properties' in {self.path}")
467
468            if "enum" in options and not isinstance(options["enum"], list):
469                _err(f"enum in {self.path} for property '{prop_name}' "
470                     "is not a list")
471
472
473class PropertySpec:
474    """
475    Represents a "property specification", i.e. the description of a
476    property provided by a binding file, like its type and description.
477
478    These attributes are available on PropertySpec objects:
479
480    binding:
481      The Binding object which defined this property.
482
483    name:
484      The property's name.
485
486    path:
487      The file where this property was defined. In case a binding includes
488      other bindings, this is the file where the property was last modified.
489
490    type:
491      The type of the property as a string, as given in the binding.
492
493    description:
494      The free-form description of the property as a string, or None.
495
496    enum:
497      A list of values the property may take as given in the binding, or None.
498
499    enum_tokenizable:
500      True if enum is not None and all the values in it are tokenizable;
501      False otherwise.
502
503      A property must have string type and an "enum:" in its binding to be
504      tokenizable. Additionally, the "enum:" values must be unique after
505      converting all non-alphanumeric characters to underscores (so "foo bar"
506      and "foo_bar" in the same "enum:" would not be tokenizable).
507
508    enum_upper_tokenizable:
509      Like 'enum_tokenizable', with the additional restriction that the
510      "enum:" values must be unique after uppercasing and converting
511      non-alphanumeric characters to underscores.
512
513    const:
514      The property's constant value as given in the binding, or None.
515
516    default:
517      The property's default value as given in the binding, or None.
518
519    deprecated:
520      True if the property is deprecated; False otherwise.
521
522    required:
523      True if the property is marked required; False otherwise.
524
525    specifier_space:
526      The specifier space for the property as given in the binding, or None.
527    """
528
529    def __init__(self, name: str, binding: Binding):
530        self.binding: Binding = binding
531        self.name: str = name
532        self._raw: Dict[str, Any] = self.binding.raw["properties"][name]
533
534    def __repr__(self) -> str:
535        return f"<PropertySpec {self.name} type '{self.type}'>"
536
537    @property
538    def path(self) -> Optional[str]:
539        "See the class docstring"
540        return self.binding.path
541
542    @property
543    def type(self) -> str:
544        "See the class docstring"
545        return self._raw["type"]
546
547    @property
548    def description(self) -> Optional[str]:
549        "See the class docstring"
550        return self._raw.get("description")
551
552    @property
553    def enum(self) -> Optional[list]:
554        "See the class docstring"
555        return self._raw.get("enum")
556
557    @property
558    def enum_tokenizable(self) -> bool:
559        "See the class docstring"
560        if not hasattr(self, '_enum_tokenizable'):
561            if self.type != 'string' or self.enum is None:
562                self._enum_tokenizable = False
563            else:
564                # Saving _as_tokens here lets us reuse it in
565                # enum_upper_tokenizable.
566                self._as_tokens = [re.sub(_NOT_ALPHANUM_OR_UNDERSCORE,
567                                          '_', value)
568                                   for value in self.enum]
569                self._enum_tokenizable = (len(self._as_tokens) ==
570                                          len(set(self._as_tokens)))
571
572        return self._enum_tokenizable
573
574    @property
575    def enum_upper_tokenizable(self) -> bool:
576        "See the class docstring"
577        if not hasattr(self, '_enum_upper_tokenizable'):
578            if not self.enum_tokenizable:
579                self._enum_upper_tokenizable = False
580            else:
581                self._enum_upper_tokenizable = \
582                    (len(self._as_tokens) ==
583                     len(set(x.upper() for x in self._as_tokens)))
584        return self._enum_upper_tokenizable
585
586    @property
587    def const(self) -> Union[None, int, List[int], str, List[str]]:
588        "See the class docstring"
589        return self._raw.get("const")
590
591    @property
592    def default(self) -> Union[None, int, List[int], str, List[str]]:
593        "See the class docstring"
594        return self._raw.get("default")
595
596    @property
597    def required(self) -> bool:
598        "See the class docstring"
599        return self._raw.get("required", False)
600
601    @property
602    def deprecated(self) -> bool:
603        "See the class docstring"
604        return self._raw.get("deprecated", False)
605
606    @property
607    def specifier_space(self) -> Optional[str]:
608        "See the class docstring"
609        return self._raw.get("specifier-space")
610
611PropertyValType = Union[int, str,
612                        List[int], List[str],
613                        'Node', List['Node'],
614                        List[Optional['ControllerAndData']],
615                        bytes, None]
616
617
618@dataclass
619class Property:
620    """
621    Represents a property on a Node, as set in its DT node and with
622    additional info from the 'properties:' section of the binding.
623
624    Only properties mentioned in 'properties:' get created. Properties of type
625    'compound' currently do not get Property instances, as it's not clear
626    what to generate for them.
627
628    These attributes are available on Property objects. Several are
629    just convenience accessors for attributes on the PropertySpec object
630    accessible via the 'spec' attribute.
631
632    These attributes are available on Property objects:
633
634    spec:
635      The PropertySpec object which specifies this property.
636
637    val:
638      The value of the property, with the format determined by spec.type,
639      which comes from the 'type:' string in the binding.
640
641        - For 'type: int/array/string/string-array', 'val' is what you'd expect
642          (a Python integer or string, or a list of them)
643
644        - For 'type: phandle' and 'type: path', 'val' is the pointed-to Node
645          instance
646
647        - For 'type: phandles', 'val' is a list of the pointed-to Node
648          instances
649
650        - For 'type: phandle-array', 'val' is a list of ControllerAndData
651          instances. See the documentation for that class.
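
      As a small illustration: with 'current-speed = <115200>;' on the node
      and the binding declaring 'current-speed' with 'type: int', 'val' is
      the Python integer 115200 (the property name here is just an example).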
652
653    node:
654      The Node instance the property is on
655
656    name:
657      Convenience for spec.name.
658
659    description:
660      Convenience for spec.description with leading and trailing whitespace
661      (including newlines) removed. May be None.
662
663    type:
664      Convenience for spec.type.
665
666    val_as_token:
667      The value of the property as a token, i.e. with non-alphanumeric
668      characters replaced with underscores. This is only safe to access
669      if 'spec.enum_tokenizable' returns True.
670
671    enum_index:
672      The index of 'val' in 'spec.enum' (which comes from the 'enum:' list
673      in the binding), or None if spec.enum is None.
674    """
675
676    spec: PropertySpec
677    val: PropertyValType
678    node: 'Node'
679
680    @property
681    def name(self) -> str:
682        "See the class docstring"
683        return self.spec.name
684
685    @property
686    def description(self) -> Optional[str]:
687        "See the class docstring"
688        return self.spec.description.strip() if self.spec.description else None
689
690    @property
691    def type(self) -> str:
692        "See the class docstring"
693        return self.spec.type
694
695    @property
696    def val_as_token(self) -> str:
697        "See the class docstring"
698        assert isinstance(self.val, str)
699        return str_as_token(self.val)
700
701    @property
702    def enum_index(self) -> Optional[int]:
703        "See the class docstring"
704        enum = self.spec.enum
705        return enum.index(self.val) if enum else None
706
707
708@dataclass
709class Register:
710    """
711    Represents a register on a node.
712
713    These attributes are available on Register objects:
714
715    node:
716      The Node instance this register is from
717
718    name:
719      The name of the register as given in the 'reg-names' property, or None if
720      there is no 'reg-names' property
721
722    addr:
723      The starting address of the register, in the parent address space, or None
724      if #address-cells is zero. Any 'ranges' properties are taken into account.
725
726    size:
727      The length of the register in bytes
728    """
729
730    node: 'Node'
731    name: Optional[str]
732    addr: Optional[int]
733    size: Optional[int]
734
735
736@dataclass
737class Range:
738    """
739    Represents a translation range on a node as described by the 'ranges' property.
740
741    These attributes are available on Range objects:
742
743    node:
744      The Node instance this range is from
745
746    child_bus_cells:
747      The number of cells used to describe a child bus address.
748
749    child_bus_addr:
750      A physical address within the child bus address space, or None if the
751      child's #address-cells equals 0.
752
753    parent_bus_cells:
754      The number of cells used to describe a parent bus address.
755
756    parent_bus_addr:
757      A physical address within the parent bus address space, or None if the
758      parent's #address-cells equals 0.
759
760    length_cells:
      The number of cells used to describe the size of the range in the
      child's address space.
763
764    length:
765      The size of the range in the child address space, or None if the
766      child's #size-cells equals 0.
767    """
768    node: 'Node'
769    child_bus_cells: int
770    child_bus_addr: Optional[int]
771    parent_bus_cells: int
772    parent_bus_addr: Optional[int]
773    length_cells: int
774    length: Optional[int]
775
776
777@dataclass
778class ControllerAndData:
779    """
780    Represents an entry in an 'interrupts' or 'type: phandle-array' property
781    value, e.g. <&ctrl-1 4 0> in
782
783        cs-gpios = <&ctrl-1 4 0 &ctrl-2 3 4>;
784
785    These attributes are available on ControllerAndData objects:
786
787    node:
788      The Node instance the property appears on
789
790    controller:
791      The Node instance for the controller (e.g. the controller the interrupt
792      gets sent to for interrupts)
793
794    data:
795      A dictionary that maps names from the *-cells key in the binding for the
796      controller to data values, e.g. {"pin": 4, "flags": 0} for the example
797      above.
798
799      'interrupts = <1 2>' might give {"irq": 1, "level": 2}.
800
801    name:
802      The name of the entry as given in
803      'interrupt-names'/'gpio-names'/'pwm-names'/etc., or None if there is no
804      *-names property
805
806    basename:
807      Basename for the controller when supporting named cells
808    """
809    node: 'Node'
810    controller: 'Node'
811    data: dict
812    name: Optional[str]
813    basename: Optional[str]
814
815
816@dataclass
817class PinCtrl:
818    """
819    Represents a pin control configuration for a set of pins on a device,
820    e.g. pinctrl-0 or pinctrl-1.
821
822    These attributes are available on PinCtrl objects:
823
824    node:
825      The Node instance the pinctrl-* property is on
826
827    name:
828      The name of the configuration, as given in pinctrl-names, or None if
829      there is no pinctrl-names property
830
831    name_as_token:
832      Like 'name', but with non-alphanumeric characters converted to underscores.
833
834    conf_nodes:
835      A list of Node instances for the pin configuration nodes, e.g.
836      the nodes pointed at by &state_1 and &state_2 in
837
838          pinctrl-0 = <&state_1 &state_2>;
839    """
840
841    node: 'Node'
842    name: Optional[str]
843    conf_nodes: List['Node']
844
845    @property
846    def name_as_token(self):
847        "See the class docstring"
848        return str_as_token(self.name) if self.name is not None else None
849
850
851class Node:
852    """
853    Represents a devicetree node, augmented with information from bindings, and
854    with some interpretation of devicetree properties. There's a one-to-one
855    correspondence between devicetree nodes and Nodes.
856
857    These attributes are available on Node objects:
858
859    edt:
860      The EDT instance this node is from
861
862    name:
863      The name of the node
864
865    unit_addr:
866      An integer with the ...@<unit-address> portion of the node name,
867      translated through any 'ranges' properties on parent nodes, or None if
868      the node name has no unit-address portion. PCI devices use a different
      node name format, ...@<dev>,<func> or ...@<dev> (e.g. "pcie@1,0"); in
      that case, None is returned as well.
871
872    description:
873      The description string from the binding for the node, or None if the node
874      has no binding. Leading and trailing whitespace (including newlines) is
875      removed.
876
877    path:
878      The devicetree path of the node
879
880    label:
881      The text from the 'label' property on the node, or None if the node has
882      no 'label'
883
884    labels:
885      A list of all of the devicetree labels for the node, in the same order
886      as the labels appear, but with duplicates removed.
887
888      This corresponds to the actual devicetree source labels, unlike the
889      "label" attribute, which is the value of a devicetree property named
890      "label".
891
892    parent:
893      The Node instance for the devicetree parent of the Node, or None if the
894      node is the root node
895
896    children:
897      A dictionary with the Node instances for the devicetree children of the
898      node, indexed by name
899
900    dep_ordinal:
901      A non-negative integer value such that the value for a Node is
902      less than the value for all Nodes that depend on it.
903
904      The ordinal is defined for all Nodes, and is unique among nodes in its
905      EDT 'nodes' list.
906
907    required_by:
908      A list with the nodes that directly depend on the node
909
910    depends_on:
911      A list with the nodes that the node directly depends on
912
913    status:
914      The node's status property value, as a string, or "okay" if the node
915      has no status property set. If the node's status property is "ok",
916      it is converted to "okay" for consistency.
917
918    read_only:
919      True if the node has a 'read-only' property, and False otherwise
920
921    matching_compat:
922      The 'compatible' string for the binding that matched the node, or None if
923      the node has no binding
924
925    binding_path:
926      The path to the binding file for the node, or None if the node has no
927      binding
928
929    compats:
930      A list of 'compatible' strings for the node, in the same order that
931      they're listed in the .dts file
932
933    ranges:
      A list of Range objects extracted from the node's 'ranges' property.
      The list is empty if the node does not have a 'ranges' property.
936
937    regs:
938      A list of Register objects for the node's registers
939
940    props:
941      A dict that maps property names to Property objects.
942      Property objects are created for all devicetree properties on the node
943      that are mentioned in 'properties:' in the binding.
944
945    aliases:
946      A list of aliases for the node. This is fetched from the /aliases node.
947
948    interrupts:
949      A list of ControllerAndData objects for the interrupts generated by the
950      node. The list is empty if the node does not generate interrupts.
951
952    pinctrls:
953      A list of PinCtrl objects for the pinctrl-<index> properties on the
954      node, sorted by index. The list is empty if the node does not have any
955      pinctrl-<index> properties.
956
957    buses:
958      If the node is a bus node (has a 'bus:' key in its binding), then this
959      attribute holds the list of supported bus types, e.g. ["i2c"], ["spi"]
960      or ["i3c", "i2c"] if multiple protocols are supported via the same bus.
961      If the node is not a bus node, then this attribute is an empty list.
962
963    on_buses:
      The bus(es) the node appears on, e.g. ["i2c"], ["spi"] or ["i3c", "i2c"] if
965      multiple protocols are supported via the same bus. The bus is determined
966      by searching upwards for a parent node whose binding has a 'bus:' key,
967      returning the value of the first 'bus:' key found. If none of the node's
968      parents has a 'bus:' key, this attribute is an empty list.
969
970    bus_node:
      Like on_buses, but contains the Node for the bus controller, or None if the
972      node is not on a bus.
973
974    flash_controller:
975      The flash controller for the node. Only meaningful for nodes representing
976      flash partitions.
977
978    spi_cs_gpio:
979      The device's SPI GPIO chip select as a ControllerAndData instance, if it
980      exists, and None otherwise. See
981      Documentation/devicetree/bindings/spi/spi-controller.yaml in the Linux kernel.
982
983    gpio_hogs:
984      A list of ControllerAndData objects for the GPIOs hogged by the node. The
985      list is empty if the node does not hog any GPIOs. Only relevant for GPIO hog
986      nodes.
987
988    is_pci_device:
989      True if the node is a PCI device.
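
    A small access sketch (the node path and property name below are
    illustrative; they must exist in the devicetree and its bindings):

        node = edt.get_node("/soc/i2c@40003000")
        if "clock-frequency" in node.props:
            freq = node.props["clock-frequency"].val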
990    """
991
992    def __init__(self,
993                 dt_node: dtlib_Node,
994                 edt: 'EDT',
995                 compats: List[str]):
996        '''
997        For internal use only; not meant to be used outside edtlib itself.
998        '''
999        # Public, some of which are initialized properly later:
1000        self.edt: 'EDT' = edt
1001        self.dep_ordinal: int = -1
1002        self.matching_compat: Optional[str] = None
1003        self.binding_path: Optional[str] = None
1004        self.compats: List[str] = compats
1005        self.ranges: List[Range] = []
1006        self.regs: List[Register] = []
1007        self.props: Dict[str, Property] = {}
1008        self.interrupts: List[ControllerAndData] = []
1009        self.pinctrls: List[PinCtrl] = []
1010        self.bus_node: Optional['Node'] = None
1011
1012        # Private, don't touch outside the class:
1013        self._node: dtlib_Node = dt_node
1014        self._binding: Optional[Binding] = None
1015
1016    @property
1017    def name(self) -> str:
1018        "See the class docstring"
1019        return self._node.name
1020
1021    @property
1022    def unit_addr(self) -> Optional[int]:
1023        "See the class docstring"
1024
1025        # TODO: Return a plain string here later, like dtlib.Node.unit_addr?
1026
1027        # PCI devices use a different node name format (e.g. "pcie@1,0")
1028        if "@" not in self.name or self.is_pci_device:
1029            return None
1030
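        # For example, a node named "flash@8000000" has the raw unit address
        # 0x8000000, which is then translated through any 'ranges' properties
        # on parent nodes by _translate() below.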
1031        try:
1032            addr = int(self.name.split("@", 1)[1], 16)
1033        except ValueError:
1034            _err(f"{self!r} has non-hex unit address")
1035
1036        return _translate(addr, self._node)
1037
1038    @property
1039    def description(self) -> Optional[str]:
1040        "See the class docstring."
1041        if self._binding:
1042            return self._binding.description
1043        return None
1044
1045    @property
    def path(self) -> str:
1047        "See the class docstring"
1048        return self._node.path
1049
1050    @property
1051    def label(self) -> Optional[str]:
1052        "See the class docstring"
1053        if "label" in self._node.props:
1054            return self._node.props["label"].to_string()
1055        return None
1056
1057    @property
1058    def labels(self) -> List[str]:
1059        "See the class docstring"
1060        return self._node.labels
1061
1062    @property
1063    def parent(self) -> Optional['Node']:
1064        "See the class docstring"
1065        return self.edt._node2enode.get(self._node.parent) # type: ignore
1066
1067    @property
1068    def children(self) -> Dict[str, 'Node']:
1069        "See the class docstring"
1070        # Could be initialized statically too to preserve identity, but not
1071        # sure if needed. Parent nodes being initialized before their children
1072        # would need to be kept in mind.
1073        return {name: self.edt._node2enode[node]
1074                for name, node in self._node.nodes.items()}
1075
1076    def child_index(self, node) -> int:
1077        """Get the index of *node* in self.children.
1078        Raises KeyError if the argument is not a child of this node.
1079        """
1080        if not hasattr(self, '_child2index'):
1081            # Defer initialization of this lookup table until this
1082            # method is callable to handle parents needing to be
            # initialized before their children. By the time we
            # return from __init__, 'self.children' is available.
1085            self._child2index: Dict[str, int] = {}
1086            for index, child_path in enumerate(child.path for child in
1087                                               self.children.values()):
1088                self._child2index[child_path] = index
1089
1090        return self._child2index[node.path]
1091
1092    @property
1093    def required_by(self) -> List['Node']:
1094        "See the class docstring"
1095        return self.edt._graph.required_by(self)
1096
1097    @property
1098    def depends_on(self) -> List['Node']:
1099        "See the class docstring"
1100        return self.edt._graph.depends_on(self)
1101
1102    @property
1103    def status(self) -> str:
1104        "See the class docstring"
1105        status = self._node.props.get("status")
1106
1107        if status is None:
1108            as_string = "okay"
1109        else:
1110            as_string = status.to_string()
1111
1112        if as_string == "ok":
1113            as_string = "okay"
1114
1115        return as_string
1116
1117    @property
1118    def read_only(self) -> bool:
1119        "See the class docstring"
1120        return "read-only" in self._node.props
1121
1122    @property
1123    def aliases(self) -> List[str]:
1124        "See the class docstring"
1125        return [alias for alias, node in self._node.dt.alias2node.items()
1126                if node is self._node]
1127
1128    @property
1129    def buses(self) -> List[str]:
1130        "See the class docstring"
1131        if self._binding:
1132            return self._binding.buses
1133        return []
1134
1135    @property
1136    def on_buses(self) -> List[str]:
1137        "See the class docstring"
1138        bus_node = self.bus_node
1139        return bus_node.buses if bus_node else []
1140
1141    @property
1142    def flash_controller(self) -> 'Node':
1143        "See the class docstring"
1144
1145        # The node path might be something like
1146        # /flash-controller@4001E000/flash@0/partitions/partition@fc000. We go
1147        # up two levels to get the flash and check its compat. The flash
1148        # controller might be the flash itself (for cases like NOR flashes).
1149        # For the case of 'soc-nv-flash', we assume the controller is the
1150        # parent of the flash node.
1151
1152        if not self.parent or not self.parent.parent:
1153            _err(f"flash partition {self!r} lacks parent or grandparent node")
1154
1155        controller = self.parent.parent
1156        if controller.matching_compat == "soc-nv-flash":
1157            if controller.parent is None:
1158                _err(f"flash controller '{controller.path}' cannot be the root node")
1159            return controller.parent
1160        return controller
1161
1162    @property
1163    def spi_cs_gpio(self) -> Optional[ControllerAndData]:
1164        "See the class docstring"
1165
1166        if not ("spi" in self.on_buses
1167                and self.bus_node
1168                and "cs-gpios" in self.bus_node.props):
1169            return None
1170
1171        if not self.regs:
1172            _err(f"{self!r} needs a 'reg' property, to look up the "
1173                 "chip select index for SPI")
1174
1175        parent_cs_lst = self.bus_node.props["cs-gpios"].val
1176        if TYPE_CHECKING:
1177            assert isinstance(parent_cs_lst, list)
1178
1179        # cs-gpios is indexed by the unit address
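        # (e.g. a child with 'reg = <1>' uses the second entry in the
        # parent's cs-gpios list)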
1180        cs_index = self.regs[0].addr
1181        if TYPE_CHECKING:
1182            assert isinstance(cs_index, int)
1183
1184        if cs_index >= len(parent_cs_lst):
1185            _err(f"index from 'regs' in {self!r} ({cs_index}) "
1186                 "is >= number of cs-gpios in "
1187                 f"{self.bus_node!r} ({len(parent_cs_lst)})")
1188
1189        ret = parent_cs_lst[cs_index]
1190        if TYPE_CHECKING:
1191            assert isinstance(ret, ControllerAndData)
1192        return ret
1193
1194    @property
1195    def gpio_hogs(self) -> List[ControllerAndData]:
1196        "See the class docstring"
1197
1198        if "gpio-hog" not in self.props:
1199            return []
1200
        if not self.parent or "gpio-controller" not in self.parent.props:
1202            _err(f"GPIO hog {self!r} lacks parent GPIO controller node")
1203
        if "#gpio-cells" not in self.parent._node.props:
1205            _err(f"GPIO hog {self!r} parent node lacks #gpio-cells")
1206
1207        n_cells = self.parent._node.props["#gpio-cells"].to_num()
1208        res = []
1209
1210        for item in _slice(self._node, "gpios", 4*n_cells,
                           f"4*(<#gpio-cells> (= {n_cells}))"):
1212            controller = self.parent
1213            res.append(ControllerAndData(
1214                node=self, controller=controller,
1215                data=self._named_cells(controller, item, "gpio"),
1216                name=None, basename="gpio"))
1217
1218        return res
1219
1220    @property
1221    def is_pci_device(self) -> bool:
1222        "See the class docstring"
1223        return 'pcie' in self.on_buses
1224
1225    def __repr__(self) -> str:
1226        if self.binding_path:
1227            binding = "binding " + self.binding_path
1228        else:
1229            binding = "no binding"
1230        return f"<Node {self.path} in '{self.edt.dts_path}', {binding}>"
1231
1232    def _init_binding(self) -> None:
1233        # Initializes Node.matching_compat, Node._binding, and
1234        # Node.binding_path.
1235        #
1236        # Node._binding holds the data from the node's binding file, in the
1237        # format returned by PyYAML (plain Python lists, dicts, etc.), or None
1238        # if the node has no binding.
1239
1240        # This relies on the parent of the node having already been
1241        # initialized, which is guaranteed by going through the nodes in
1242        # node_iter() order.
1243
1244        if self.path in self.edt._infer_binding_for_paths:
1245            self._binding_from_properties()
1246            return
1247
1248        if self.compats:
1249            on_buses = self.on_buses
1250
1251            for compat in self.compats:
1252                # When matching, respect the order of the 'compatible' entries,
1253                # and for each one first try to match against an explicitly
1254                # specified bus (if any) and then against any bus. This is so
1255                # that matching against bindings which do not specify a bus
1256                # works the same way in Zephyr as it does elsewhere.
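                #
                # For example (with an illustrative compatible): for a node
                # with compatible "vnd,foo" that sits on an "i2c" bus, the
                # lookup below tries a ("vnd,foo", "i2c") binding first and
                # then falls back to ("vnd,foo", None).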
1257                binding = None
1258
1259                for bus in on_buses:
1260                    if (compat, bus) in self.edt._compat2binding:
1261                        binding = self.edt._compat2binding[compat, bus]
1262                        break
1263
1264                if not binding:
1265                    if (compat, None) in self.edt._compat2binding:
1266                        binding = self.edt._compat2binding[compat, None]
1267                    else:
1268                        continue
1269
1270                self.binding_path = binding.path
1271                self.matching_compat = compat
1272                self._binding = binding
1273                return
1274        else:
1275            # No 'compatible' property. See if the parent binding has
1276            # a compatible. This can come from one or more levels of
1277            # nesting with 'child-binding:'.
1278
1279            binding_from_parent = self._binding_from_parent()
1280            if binding_from_parent:
1281                self._binding = binding_from_parent
1282                self.binding_path = self._binding.path
1283                self.matching_compat = self._binding.compatible
1284
1285                return
1286
1287        # No binding found
1288        self._binding = self.binding_path = self.matching_compat = None
1289
1290    def _binding_from_properties(self) -> None:
1291        # Sets up a Binding object synthesized from the properties in the node.
1292
1293        if self.compats:
1294            _err(f"compatible in node with inferred binding: {self.path}")
1295
1296        # Synthesize a 'raw' binding as if it had been parsed from YAML.
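        # For example, a node containing just 'foo = <1>;' ends up with
        # raw['properties'] equal to {'foo': {'type': 'int'}}.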
1297        raw: Dict[str, Any] = {
1298            'description': 'Inferred binding from properties, via edtlib.',
1299            'properties': {},
1300        }
1301        for name, prop in self._node.props.items():
1302            pp: Dict[str, str] = {}
1303            if prop.type == Type.EMPTY:
1304                pp["type"] = "boolean"
1305            elif prop.type == Type.BYTES:
1306                pp["type"] = "uint8-array"
1307            elif prop.type == Type.NUM:
1308                pp["type"] = "int"
1309            elif prop.type == Type.NUMS:
1310                pp["type"] = "array"
1311            elif prop.type == Type.STRING:
1312                pp["type"] = "string"
1313            elif prop.type == Type.STRINGS:
1314                pp["type"] = "string-array"
1315            elif prop.type == Type.PHANDLE:
1316                pp["type"] = "phandle"
1317            elif prop.type == Type.PHANDLES:
1318                pp["type"] = "phandles"
1319            elif prop.type == Type.PHANDLES_AND_NUMS:
1320                pp["type"] = "phandle-array"
1321            elif prop.type == Type.PATH:
1322                pp["type"] = "path"
1323            else:
1324                _err(f"cannot infer binding from property: {prop} "
1325                     f"with type {prop.type!r}")
1326            raw['properties'][name] = pp
1327
1328        # Set up Node state.
1329        self.binding_path = None
1330        self.matching_compat = None
1331        self.compats = []
1332        self._binding = Binding(None, {}, raw=raw, require_compatible=False)
1333
1334    def _binding_from_parent(self) -> Optional[Binding]:
1335        # Returns the binding from 'child-binding:' in the parent node's
1336        # binding.
1337
1338        if not self.parent:
1339            return None
1340
1341        pbinding = self.parent._binding
1342        if not pbinding:
1343            return None
1344
1345        if pbinding.child_binding:
1346            return pbinding.child_binding
1347
1348        return None
1349
1350    def _bus_node(self, support_fixed_partitions_on_any_bus: bool = True
1351                  ) -> Optional['Node']:
1352        # Returns the value for self.bus_node. Relies on parent nodes being
1353        # initialized before their children.
1354
1355        if not self.parent:
1356            # This is the root node
1357            return None
1358
        # Treat 'fixed-partitions' as if they are not on any bus. The reason is
        # that flash nodes might be on a SPI bus or on a controller or SoC bus.
        # Having bus be None means we'll always match the binding for
        # fixed-partitions, and also means that when processing the
        # fixed-partitions node we won't try to do anything bus-specific with
        # it.
1364        if support_fixed_partitions_on_any_bus and "fixed-partitions" in self.compats:
1365            return None
1366
1367        if self.parent.buses:
1368            # The parent node is a bus node
1369            return self.parent
1370
1371        # Same bus node as parent (possibly None)
1372        return self.parent.bus_node
1373
1374    def _init_props(self, default_prop_types: bool = False,
1375                    err_on_deprecated: bool = False) -> None:
1376        # Creates self.props. See the class docstring. Also checks that all
1377        # properties on the node are declared in its binding.
1378
1379        self.props = {}
1380
1381        node = self._node
1382        if self._binding:
1383            prop2specs = self._binding.prop2specs
1384        else:
1385            prop2specs = None
1386
1387        # Initialize self.props
1388        if prop2specs:
1389            for prop_spec in prop2specs.values():
1390                self._init_prop(prop_spec, err_on_deprecated)
1391            self._check_undeclared_props()
1392        elif default_prop_types:
1393            for name in node.props:
1394                if name not in _DEFAULT_PROP_SPECS:
1395                    continue
1396                prop_spec = _DEFAULT_PROP_SPECS[name]
1397                val = self._prop_val(name, prop_spec.type, False, False, None,
1398                                     None, err_on_deprecated)
1399                self.props[name] = Property(prop_spec, val, self)
1400
1401    def _init_prop(self, prop_spec: PropertySpec,
1402                   err_on_deprecated: bool) -> None:
1403        # _init_props() helper for initializing a single property.
1404        # 'prop_spec' is a PropertySpec object from the node's binding.
1405
1406        name = prop_spec.name
1407        prop_type = prop_spec.type
1408        if not prop_type:
1409            _err(f"'{name}' in {self.binding_path} lacks 'type'")
1410
1411        val = self._prop_val(name, prop_type, prop_spec.deprecated,
1412                             prop_spec.required, prop_spec.default,
1413                             prop_spec.specifier_space, err_on_deprecated)
1414
1415        if val is None:
1416            # 'required: false' property that wasn't there, or a property type
1417            # for which we store no data.
1418            return
1419
1420        enum = prop_spec.enum
1421        if enum and val not in enum:
1422            _err(f"value of property '{name}' on {self.path} in "
1423                 f"{self.edt.dts_path} ({val!r}) is not in 'enum' list in "
1424                 f"{self.binding_path} ({enum!r})")
1425
1426        const = prop_spec.const
1427        if const is not None and val != const:
1428            _err(f"value of property '{name}' on {self.path} in "
1429                 f"{self.edt.dts_path} ({val!r}) "
1430                 "is different from the 'const' value specified in "
1431                 f"{self.binding_path} ({const!r})")
1432
1433        # Skip properties that start with '#', like '#size-cells', and mapping
1434        # properties like 'gpio-map'/'interrupt-map'
1435        if name[0] == "#" or name.endswith("-map"):
1436            return
1437
1438        self.props[name] = Property(prop_spec, val, self)
1439
1440    def _prop_val(self, name: str, prop_type: str,
1441                  deprecated: bool, required: bool,
1442                  default: PropertyValType,
1443                  specifier_space: Optional[str],
1444                  err_on_deprecated: bool) -> PropertyValType:
1445        # _init_prop() helper for getting the property's value
1446        #
1447        # name:
1448        #   Property name from binding
1449        #
1450        # prop_type:
1451        #   Property type from binding (a string like "int")
1452        #
1453        # deprecated:
1454        #   True if the property is deprecated
1455        #
1456        # required:
1457        #   True if the property is required to exist
1458        #
1459        # default:
1460        #   Default value to use when the property doesn't exist, or None if
1461        #   the binding doesn't give a default value
1462        #
1463        # specifier_space:
1464        #   Property specifier-space from binding (if prop_type is "phandle-array")
1465        #
1466        # err_on_deprecated:
1467        #   If True, a deprecated property is an error instead of warning.
1468
1469        node = self._node
1470        prop = node.props.get(name)
1471
1472        if prop and deprecated:
1473            msg = (f"'{name}' is marked as deprecated in 'properties:' "
1474                   f"in {self.binding_path} for node {node.path}.")
1475            if err_on_deprecated:
1476                _err(msg)
1477            else:
1478                _LOG.warning(msg)
1479
1480        if not prop:
1481            if required and self.status == "okay":
1482                _err(f"'{name}' is marked as required in 'properties:' in "
1483                     f"{self.binding_path}, but does not appear in {node!r}")
1484
1485            if default is not None:
1486                # YAML doesn't have a native format for byte arrays. We need to
1487                # convert those from an array like [0x12, 0x34, ...]. The
1488                # format has already been checked in
1489                # _check_prop_by_type().
1490                if prop_type == "uint8-array":
1491                    return bytes(default) # type: ignore
1492                return default
1493
1494            return False if prop_type == "boolean" else None
1495
1496        if prop_type == "boolean":
1497            if prop.type != Type.EMPTY:
1498                _err("'{0}' in {1!r} is defined with 'type: boolean' in {2}, "
1499                     "but is assigned a value ('{3}') instead of being empty "
1500                     "('{0};')".format(name, node, self.binding_path, prop))
1501            return True
1502
1503        if prop_type == "int":
1504            return prop.to_num()
1505
1506        if prop_type == "array":
1507            return prop.to_nums()
1508
1509        if prop_type == "uint8-array":
1510            return prop.to_bytes()
1511
1512        if prop_type == "string":
1513            return prop.to_string()
1514
1515        if prop_type == "string-array":
1516            return prop.to_strings()
1517
1518        if prop_type == "phandle":
1519            return self.edt._node2enode[prop.to_node()]
1520
1521        if prop_type == "phandles":
1522            return [self.edt._node2enode[node] for node in prop.to_nodes()]
1523
1524        if prop_type == "phandle-array":
1525            # This type is a bit high-level for dtlib as it involves
1526            # information from bindings and *-names properties, so there's no
1527            # to_phandle_array() in dtlib. Do the type check ourselves.
1528            if prop.type not in (Type.PHANDLE, Type.PHANDLES, Type.PHANDLES_AND_NUMS):
1529                _err(f"expected property '{name}' in {node.path} in "
1530                     f"{node.dt.filename} to be assigned "
1531                     f"with '{name} = < &foo ... &bar 1 ... &baz 2 3 >' "
1532                     f"(a mix of phandles and numbers), not '{prop}'")
1533
1534            return self._standard_phandle_val_list(prop, specifier_space)
1535
1536        if prop_type == "path":
1537            return self.edt._node2enode[prop.to_path()]
1538
1539        # prop_type == "compound". Checking that the 'type:'
1540        # value is valid is done in _check_prop_by_type().
1541        #
1542        # 'compound' is a dummy type for properties that don't fit any of the
1543        # patterns above, so that we can require all entries in 'properties:'
1544        # to have a 'type: ...'. No Property object is created for it.
1545        return None
1546
1547    def _check_undeclared_props(self) -> None:
1548        # Checks that all properties are declared in the binding
1549
1550        for prop_name in self._node.props:
1551            # Allow a few special properties to not be declared in the binding
1552            if prop_name.endswith("-controller") or \
1553               prop_name.startswith("#") or \
1554               prop_name in {
1555                   "compatible", "status", "ranges", "phandle",
1556                   "interrupt-parent", "interrupts-extended", "device_type"}:
1557                continue
1558
1559            if TYPE_CHECKING:
1560                assert self._binding
1561
1562            if prop_name not in self._binding.prop2specs:
1563                _err(f"'{prop_name}' appears in {self._node.path} in "
1564                     f"{self.edt.dts_path}, but is not declared in "
1565                     f"'properties:' in {self.binding_path}")
1566
1567    def _init_ranges(self) -> None:
1568        # Initializes self.ranges
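        #
        # For example (a sketch; the cell counts are hypothetical), with
        # '#address-cells = <1>' and '#size-cells = <1>' on this node and
        # '#address-cells = <1>' on its parent,
        #
        #     ranges = <0x0 0x40000000 0x1000>;
        #
        # produces one Range entry whose child bus address is 0x0, parent bus
        # address is 0x40000000, and length is 0x1000.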
1569        node = self._node
1570
1571        self.ranges = []
1572
1573        if "ranges" not in node.props:
1574            return
1575
1576        raw_child_address_cells = node.props.get("#address-cells")
1577        parent_address_cells = _address_cells(node)
1578        if raw_child_address_cells is None:
1579            child_address_cells = 2 # Default value per DT spec.
1580        else:
1581            child_address_cells = raw_child_address_cells.to_num()
1582        raw_child_size_cells = node.props.get("#size-cells")
1583        if raw_child_size_cells is None:
1584            child_size_cells = 1 # Default value per DT spec.
1585        else:
1586            child_size_cells = raw_child_size_cells.to_num()
1587
1588        # Number of cells for one translation 3-tuple in 'ranges'
1589        entry_cells = child_address_cells + parent_address_cells + child_size_cells
1590
1591        if entry_cells == 0:
1592            if len(node.props["ranges"].value) == 0:
1593                return
1594            else:
1595                _err(f"'ranges' should be empty in {self._node.path} since "
1596                     f"<#address-cells> = {child_address_cells}, "
1597                     f"<#address-cells for parent> = {parent_address_cells} and "
1598                     f"<#size-cells> = {child_size_cells}")
1599
1600        for raw_range in _slice(node, "ranges", 4*entry_cells,
1601                                f"4*(<#address-cells> (= {child_address_cells}) + "
1602                                "<#address-cells for parent> "
1603                                f"(= {parent_address_cells}) + "
1604                                f"<#size-cells> (= {child_size_cells}))"):
1605
1606            child_bus_cells = child_address_cells
1607            if child_address_cells == 0:
1608                child_bus_addr = None
1609            else:
1610                child_bus_addr = to_num(raw_range[:4*child_address_cells])
1611            parent_bus_cells = parent_address_cells
1612            if parent_address_cells == 0:
1613                parent_bus_addr = None
1614            else:
1615                parent_bus_addr = to_num(
1616                    raw_range[(4*child_address_cells):
1617                              (4*child_address_cells + 4*parent_address_cells)])
1618            length_cells = child_size_cells
1619            if child_size_cells == 0:
1620                length = None
1621            else:
1622                length = to_num(
1623                    raw_range[(4*child_address_cells + 4*parent_address_cells):])
1624
1625            self.ranges.append(Range(self, child_bus_cells, child_bus_addr,
1626                                     parent_bus_cells, parent_bus_addr,
1627                                     length_cells, length))
1628
1629    def _init_regs(self) -> None:
1630        # Initializes self.regs
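        #
        # For example (a sketch; the cell counts are hypothetical), with
        # '#address-cells = <1>' and '#size-cells = <1>' in effect,
        #
        #     reg = <0x4000 0x100>;
        #
        # produces one Register entry with address 0x4000 (after any 'ranges'
        # translation) and size 0x100.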
1631
1632        node = self._node
1633
1634        self.regs = []
1635
1636        if "reg" not in node.props:
1637            return
1638
1639        address_cells = _address_cells(node)
1640        size_cells = _size_cells(node)
1641
1642        for raw_reg in _slice(node, "reg", 4*(address_cells + size_cells),
1643                              f"4*(<#address-cells> (= {address_cells}) + "
1644                              f"<#size-cells> (= {size_cells}))"):
1645            if address_cells == 0:
1646                addr = None
1647            else:
1648                addr = _translate(to_num(raw_reg[:4*address_cells]), node)
1649            if size_cells == 0:
1650                size = None
1651            else:
1652                size = to_num(raw_reg[4*address_cells:])
1653            # Size zero is ok for PCI devices
1654            if size_cells != 0 and size == 0 and not self.is_pci_device:
1655                _err(f"zero-sized 'reg' in {self._node!r} seems meaningless "
1656                     "(maybe you want a size of one or #size-cells = 0 "
1657                     "instead)")
1658
1659            # We'll fix up the name when we're done.
1660            self.regs.append(Register(self, None, addr, size))
1661
1662        _add_names(node, "reg", self.regs)
1663
1664    def _init_pinctrls(self) -> None:
1665        # Initializes self.pinctrls from any pinctrl-<index> properties
1666
1667        node = self._node
1668
1669        # pinctrl-<index> properties
1670        pinctrl_props = [prop for name, prop in node.props.items()
1671                         if re.match("pinctrl-[0-9]+", name)]
1672        # Sort by index
1673        pinctrl_props.sort(key=lambda prop: prop.name)
1674
1675        # Check indices
1676        for i, prop in enumerate(pinctrl_props):
1677            if prop.name != "pinctrl-" + str(i):
1678                _err(f"missing 'pinctrl-{i}' property on {node!r} "
1679                     "- indices should be contiguous and start from zero")
1680
1681        self.pinctrls = []
1682        for prop in pinctrl_props:
1683            # We'll fix up the names below.
1684            self.pinctrls.append(PinCtrl(
1685                node=self,
1686                name=None,
1687                conf_nodes=[self.edt._node2enode[node]
1688                            for node in prop.to_nodes()]))
1689
1690        _add_names(node, "pinctrl", self.pinctrls)
1691
1692    def _init_interrupts(self) -> None:
1693        # Initializes self.interrupts
1694
1695        node = self._node
1696
1697        self.interrupts = []
1698
1699        for controller_node, data in _interrupts(node):
1700            # We'll fix up the names below.
1701            controller = self.edt._node2enode[controller_node]
1702            self.interrupts.append(ControllerAndData(
1703                node=self, controller=controller,
1704                data=self._named_cells(controller, data, "interrupt"),
1705                name=None, basename=None))
1706
1707        _add_names(node, "interrupt", self.interrupts)
1708
1709    def _standard_phandle_val_list(
1710            self,
1711            prop: dtlib_Property,
1712            specifier_space: Optional[str]
1713    ) -> List[Optional[ControllerAndData]]:
1714        # Parses a property like
1715        #
1716        #     <prop.name> = <phandle cell phandle cell ...>;
1717        #
1718        # where each phandle points to a controller node that has a
1719        #
1720        #     #<specifier_space>-cells = <size>;
1721        #
1722        # property that gives the number of cells in the value after the
1723        # controller's phandle in the property.
1724        #
1725        # E.g. with a property like
1726        #
1727        #     pwms = <&foo 1 2 &bar 3>;
1728        #
1729        # If 'specifier_space' is "pwm", then we should have this elsewhere
1730        # in the tree:
1731        #
1732        #     foo: ... {
1733        #             #pwm-cells = <2>;
1734        #     };
1735        #
1736        #     bar: ... {
1737        #             #pwm-cells = <1>;
1738        #     };
1739        #
1740        # These values can be given names using the <specifier_space>-names:
1741        # list in the binding for the phandle nodes.
1742        #
1743        # Also parses any
1744        #
1745        #     <specifier_space>-names = "...", "...", ...
1746        #
1747        # Returns a list of Optional[ControllerAndData] instances.
1748        #
1749        # An index is None if the underlying phandle-array element is
1750        # unspecified.
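        #
        # For example (continuing the sketch above with the hypothetical
        # 'foo' and 'bar' controllers), 'pwms = <&foo 1 2 &bar 3>;' produces
        # a list of two ControllerAndData entries: one for 'foo' with the
        # cells 1 and 2 as its data, and one for 'bar' with the single cell 3.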
1751
1752        if not specifier_space:
1753            if prop.name.endswith("gpios"):
1754                # There's some slight special-casing for *-gpios properties in that
1755                # e.g. foo-gpios still maps to #gpio-cells rather than
1756                # #foo-gpio-cells
1757                specifier_space = "gpio"
1758            else:
1759                # Strip -s. We've already checked that property names end in -s
1760                # if there is no specifier space in _check_prop_by_type().
1761                specifier_space = prop.name[:-1]
1762
1763        res: List[Optional[ControllerAndData]] = []
1764
1765        for item in _phandle_val_list(prop, specifier_space):
1766            if item is None:
1767                res.append(None)
1768                continue
1769
1770            controller_node, data = item
1771            mapped_controller, mapped_data = \
1772                _map_phandle_array_entry(prop.node, controller_node, data,
1773                                         specifier_space)
1774
1775            controller = self.edt._node2enode[mapped_controller]
1776            # We'll fix up the names below.
1777            res.append(ControllerAndData(
1778                node=self, controller=controller,
1779                data=self._named_cells(controller, mapped_data,
1780                                       specifier_space),
1781                name=None, basename=specifier_space))
1782
1783        _add_names(self._node, specifier_space, res)
1784
1785        return res
1786
1787    def _named_cells(
1788            self,
1789            controller: 'Node',
1790            data: bytes,
1791            basename: str
1792    ) -> Dict[str, int]:
1793        # Returns a dictionary that maps <basename>-cells names given in the
1794        # binding for 'controller' to cell values. 'data' is the raw data, as a
1795        # byte array.
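        #
        # For example (a sketch with hypothetical cell names), if the
        # controller's binding has
        #
        #     pwm-cells:
        #       - channel
        #       - period
        #
        # and 'data' holds the two cells <1 2>, this returns
        # {"channel": 1, "period": 2}.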
1796
1797        if not controller._binding:
1798            _err(f"{basename} controller {controller._node!r} "
1799                 f"for {self._node!r} lacks binding")
1800
1801        if basename in controller._binding.specifier2cells:
1802            cell_names: List[str] = controller._binding.specifier2cells[basename]
1803        else:
1804            # Treat no *-cells in the binding the same as an empty *-cells, so
1805            # that bindings don't have to have e.g. an empty 'clock-cells:' for
1806            # '#clock-cells = <0>'.
1807            cell_names = []
1808
1809        data_list = to_nums(data)
1810        if len(data_list) != len(cell_names):
1811            _err(f"unexpected '{basename}-cells:' length in binding for "
1812                 f"{controller._node!r} - {len(cell_names)} "
1813                 f"instead of {len(data_list)}")
1814
1815        return dict(zip(cell_names, data_list))
1816
1817
1818class EDT:
1819    """
1820    Represents a devicetree augmented with information from bindings.
1821
1822    These attributes are available on EDT objects:
1823
1824    nodes:
1825      A list of Node objects for the nodes that appear in the devicetree
1826
1827    compat2nodes:
1828      A collections.defaultdict that maps each 'compatible' string that appears
1829      on some Node to a list of Nodes with that compatible.
1830
1831    compat2okay:
1832      Like compat2nodes, but just for nodes with status 'okay'.
1833
1834    compat2vendor:
1835      A collections.defaultdict that maps each 'compatible' string that appears
1836      on some Node to a vendor name parsed from vendor_prefixes.
1837
1838    compat2model:
1839      A collections.defaultdict that maps each 'compatible' string that appears
1840      on some Node to a model name parsed from that compatible.
1841
1842    label2node:
1843      A dict that maps a node label to the node with that label.
1844
    dep_ord2node:
      A dict that maps a dependency ordinal to the node with that ordinal.
1847
    chosen_nodes:
      A dict that maps the properties defined on the devicetree's /chosen
      node to their values. The dict is indexed by property name (a string),
      and each value is converted to a Node object. Properties of the
      /chosen node that can't be converted to a Node are not included.
1854
1855    dts_path:
1856      The .dts path passed to __init__()
1857
1858    dts_source:
1859      The final DTS source code of the loaded devicetree after merging nodes
1860      and processing /delete-node/ and /delete-property/, as a string
1861
1862    bindings_dirs:
1863      The bindings directory paths passed to __init__()
1864
1865    scc_order:
1866      A list of lists of Nodes. All elements of each list
1867      depend on each other, and the Nodes in any list do not depend
1868      on any Node in a subsequent list. Each list defines a Strongly
1869      Connected Component (SCC) of the graph.
1870
1871      For an acyclic graph each list will be a singleton. Cycles
1872      will be represented by lists with multiple nodes. Cycles are
1873      not expected to be present in devicetree graphs.
1874
1875    The standard library's pickle module can be used to marshal and
1876    unmarshal EDT objects.
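
    A minimal usage sketch (the .dts path, bindings directory, and compatible
    string are hypothetical):

        edt = EDT("board.dts", ["dts/bindings"])
        for node in edt.compat2okay["vnd,device"]:
            print(node.path)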
1877    """
1878
1879    def __init__(self,
1880                 dts: Optional[str],
1881                 bindings_dirs: List[str],
1882                 warn_reg_unit_address_mismatch: bool = True,
1883                 default_prop_types: bool = True,
1884                 support_fixed_partitions_on_any_bus: bool = True,
1885                 infer_binding_for_paths: Optional[Iterable[str]] = None,
1886                 vendor_prefixes: Optional[Dict[str, str]] = None,
1887                 werror: bool = False):
1888        """EDT constructor.
1889
1890        dts:
1891          Path to devicetree .dts file. Passing None for this value
1892          is only for internal use; do not do that outside of edtlib.
1893
1894        bindings_dirs:
1895          List of paths to directories containing bindings, in YAML format.
1896          These directories are recursively searched for .yaml files.
1897
1898        warn_reg_unit_address_mismatch (default: True):
1899          If True, a warning is logged if a node has a 'reg' property where
1900          the address of the first entry does not match the unit address of the
          node.
1902
1903        default_prop_types (default: True):
1904          If True, default property types will be used when a node has no
1905          bindings.
1906
        support_fixed_partitions_on_any_bus (default: True):
          If True, set Node.bus to None for 'fixed-partitions' compatible
          nodes. This allows the 'fixed-partitions' binding to match
          regardless of the bus the node is under.
1911
        infer_binding_for_paths (default: None):
          An iterable of devicetree paths identifying nodes for which bindings
          should be inferred from the node content. (Child nodes are not
          processed.) Pass None if no nodes should have inferred bindings.
1916
        vendor_prefixes (default: None):
          A dict mapping vendor prefixes in compatible properties to their
          descriptions. If given, compatibles in the form "manufacturer,device"
          for which "manufacturer" is neither a key in the dict nor in a
          special set of grandfathered-in exceptions will cause warnings.
1922
1923        werror (default: False):
1924          If True, some edtlib specific warnings become errors. This currently
1925          errors out if 'dts' has any deprecated properties set, or an unknown
1926          vendor prefix is used.
1927        """
1928        # All instance attributes should be initialized here.
1929        # This makes it easy to keep track of them, which makes
1930        # implementing __deepcopy__() easier.
1931        # If you change this, make sure to update __deepcopy__() too,
1932        # and update the tests for that method.
1933
1934        # Public attributes (the rest are properties)
1935        self.nodes: List[Node] = []
1936        self.compat2nodes: Dict[str, List[Node]] = defaultdict(list)
1937        self.compat2okay: Dict[str, List[Node]] = defaultdict(list)
1938        self.compat2vendor: Dict[str, str] = defaultdict(str)
1939        self.compat2model: Dict[str, str]  = defaultdict(str)
1940        self.label2node: Dict[str, Node] = {}
1941        self.dep_ord2node: Dict[int, Node] = {}
1942        self.dts_path: str = dts # type: ignore
1943        self.bindings_dirs: List[str] = list(bindings_dirs)
1944
1945        # Saved kwarg values for internal use
1946        self._warn_reg_unit_address_mismatch: bool = warn_reg_unit_address_mismatch
1947        self._default_prop_types: bool = default_prop_types
1948        self._fixed_partitions_no_bus: bool = support_fixed_partitions_on_any_bus
1949        self._infer_binding_for_paths: Set[str] = set(infer_binding_for_paths or [])
1950        self._vendor_prefixes: Dict[str, str] = vendor_prefixes or {}
1951        self._werror: bool = bool(werror)
1952
1953        # Other internal state
1954        self._compat2binding: Dict[Tuple[str, Optional[str]], Binding] = {}
1955        self._graph: Graph = Graph()
1956        self._binding_paths: List[str] = _binding_paths(self.bindings_dirs)
1957        self._binding_fname2path: Dict[str, str] = {
1958            os.path.basename(path): path
1959            for path in self._binding_paths
1960        }
1961        self._node2enode: Dict[dtlib_Node, Node] = {}
1962
1963        if dts is not None:
1964            try:
1965                self._dt = DT(dts)
1966            except DTError as e:
1967                raise EDTError(e) from e
1968            self._finish_init()
1969
1970    def _finish_init(self) -> None:
1971        # This helper exists to make the __deepcopy__() implementation
1972        # easier to keep in sync with __init__().
1973        _check_dt(self._dt)
1974
1975        self._init_compat2binding()
1976        self._init_nodes()
1977        self._init_graph()
1978        self._init_luts()
1979
1980        self._check()
1981
1982    def get_node(self, path: str) -> Node:
1983        """
1984        Returns the Node at the DT path or alias 'path'. Raises EDTError if the
1985        path or alias doesn't exist.
1986        """
1987        try:
1988            return self._node2enode[self._dt.get_node(path)]
1989        except DTError as e:
1990            _err(e)
1991
1992    @property
1993    def chosen_nodes(self) -> Dict[str, Node]:
1994        ret: Dict[str, Node] = {}
1995
1996        try:
1997            chosen = self._dt.get_node("/chosen")
1998        except DTError:
1999            return ret
2000
2001        for name, prop in chosen.props.items():
2002            try:
2003                node = prop.to_path()
2004            except DTError:
2005                # DTS value is not phandle or string, or path doesn't exist
2006                continue
2007
2008            ret[name] = self._node2enode[node]
2009
2010        return ret
2011
2012    def chosen_node(self, name: str) -> Optional[Node]:
2013        """
2014        Returns the Node pointed at by the property named 'name' in /chosen, or
2015        None if the property is missing
2016        """
2017        return self.chosen_nodes.get(name)
2018
2019    @property
2020    def dts_source(self) -> str:
2021        return f"{self._dt}"
2022
2023    def __repr__(self) -> str:
2024        return f"<EDT for '{self.dts_path}', binding directories " \
2025            f"'{self.bindings_dirs}'>"
2026
2027    def __deepcopy__(self, memo) -> 'EDT':
2028        """
2029        Implements support for the standard library copy.deepcopy()
2030        function on EDT instances.
2031        """
2032
2033        ret = EDT(
2034            None,
2035            self.bindings_dirs,
2036            warn_reg_unit_address_mismatch=self._warn_reg_unit_address_mismatch,
2037            default_prop_types=self._default_prop_types,
2038            support_fixed_partitions_on_any_bus=self._fixed_partitions_no_bus,
2039            infer_binding_for_paths=set(self._infer_binding_for_paths),
2040            vendor_prefixes=dict(self._vendor_prefixes),
2041            werror=self._werror
2042        )
2043        ret.dts_path = self.dts_path
2044        ret._dt = deepcopy(self._dt, memo)
2045        ret._finish_init()
2046        return ret
2047
2048    @property
2049    def scc_order(self) -> List[List[Node]]:
2050        try:
2051            return self._graph.scc_order()
2052        except Exception as e:
2053            raise EDTError(e)
2054
2055    def _init_graph(self) -> None:
2056        # Constructs a graph of dependencies between Node instances,
2057        # which is usable for computing a partial order over the dependencies.
2058        # The algorithm supports detecting dependency loops.
2059        #
2060        # Actually computing the SCC order is lazily deferred to the
2061        # first time the scc_order property is read.
2062
2063        for node in self.nodes:
2064            # Always insert root node
2065            if not node.parent:
2066                self._graph.add_node(node)
2067
2068            # A Node always depends on its parent.
2069            for child in node.children.values():
2070                self._graph.add_edge(child, node)
2071
2072            # A Node depends on any Nodes present in 'phandle',
2073            # 'phandles', or 'phandle-array' property values.
2074            for prop in node.props.values():
2075                if prop.type == 'phandle':
2076                    self._graph.add_edge(node, prop.val)
2077                elif prop.type == 'phandles':
2078                    if TYPE_CHECKING:
2079                        assert isinstance(prop.val, list)
2080                    for phandle_node in prop.val:
2081                        self._graph.add_edge(node, phandle_node)
2082                elif prop.type == 'phandle-array':
2083                    if TYPE_CHECKING:
2084                        assert isinstance(prop.val, list)
2085                    for cd in prop.val:
2086                        if cd is None:
2087                            continue
2088                        if TYPE_CHECKING:
2089                            assert isinstance(cd, ControllerAndData)
2090                        self._graph.add_edge(node, cd.controller)
2091
2092            # A Node depends on whatever supports the interrupts it
2093            # generates.
2094            for intr in node.interrupts:
2095                self._graph.add_edge(node, intr.controller)
2096
2097    def _init_compat2binding(self) -> None:
2098        # Creates self._compat2binding, a dictionary that maps
2099        # (<compatible>, <bus>) tuples (both strings) to Binding objects.
2100        #
2101        # The Binding objects are created from YAML files discovered
2102        # in self.bindings_dirs as needed.
2103        #
2104        # For example, self._compat2binding["company,dev", "can"]
2105        # contains the Binding for the 'company,dev' device, when it
2106        # appears on the CAN bus.
2107        #
2108        # For bindings that don't specify a bus, <bus> is None, so that e.g.
2109        # self._compat2binding["company,notonbus", None] is the Binding.
2110        #
2111        # Only bindings for 'compatible' strings that appear in the devicetree
2112        # are loaded.
2113
2114        dt_compats = _dt_compats(self._dt)
        # A regex search function that matches any of the 'compatible'
        # strings that appear in the devicetree
2117        dt_compats_search = re.compile(
2118            "|".join(re.escape(compat) for compat in dt_compats)
2119        ).search
2120
2121        for binding_path in self._binding_paths:
2122            with open(binding_path, encoding="utf-8") as f:
2123                contents = f.read()
2124
2125            # As an optimization, skip parsing files that don't contain any of
2126            # the .dts 'compatible' strings, which should be reasonably safe
2127            if not dt_compats_search(contents):
2128                continue
2129
2130            # Load the binding and check that it actually matches one of the
2131            # compatibles. Might get false positives above due to comments and
2132            # stuff.
2133
2134            try:
2135                # Parsed PyYAML output (Python lists/dictionaries/strings/etc.,
2136                # representing the file)
2137                raw = yaml.load(contents, Loader=_BindingLoader)
2138            except yaml.YAMLError as e:
                _err(f"'{binding_path}' appears in binding directories "
                     f"but isn't valid YAML: {e}")
2142                continue
2143
2144            # Convert the raw data to a Binding object, erroring out
2145            # if necessary.
2146            binding = self._binding(raw, binding_path, dt_compats)
2147
2148            # Register the binding in self._compat2binding, along with
2149            # any child bindings that have their own compatibles.
2150            while binding is not None:
2151                if binding.compatible:
2152                    self._register_binding(binding)
2153                binding = binding.child_binding
2154
2155    def _binding(self,
2156                 raw: Optional[dict],
2157                 binding_path: str,
2158                 dt_compats: Set[str]) -> Optional[Binding]:
2159        # Convert a 'raw' binding from YAML to a Binding object and return it.
2160        #
2161        # Error out if the raw data looks like an invalid binding.
2162        #
2163        # Return None if the file doesn't contain a binding or the
2164        # binding's compatible isn't in dt_compats.
2165
2166        # Get the 'compatible:' string.
2167        if raw is None or "compatible" not in raw:
2168            # Empty file, binding fragment, spurious file, etc.
2169            return None
2170
2171        compatible = raw["compatible"]
2172
2173        if compatible not in dt_compats:
2174            # Not a compatible we care about.
2175            return None
2176
2177        # Initialize and return the Binding object.
2178        return Binding(binding_path, self._binding_fname2path, raw=raw)
2179
2180    def _register_binding(self, binding: Binding) -> None:
2181        # Do not allow two different bindings to have the same
2182        # 'compatible:'/'on-bus:' combo
2183        if TYPE_CHECKING:
2184            assert binding.compatible
2185        old_binding = self._compat2binding.get((binding.compatible,
2186                                                binding.on_bus))
2187        if old_binding:
2188            msg = (f"both {old_binding.path} and {binding.path} have "
2189                   f"'compatible: {binding.compatible}'")
2190            if binding.on_bus is not None:
2191                msg += f" and 'on-bus: {binding.on_bus}'"
2192            _err(msg)
2193
2194        # Register the binding.
2195        self._compat2binding[binding.compatible, binding.on_bus] = binding
2196
2197    def _init_nodes(self) -> None:
2198        # Creates a list of edtlib.Node objects from the dtlib.Node objects, in
2199        # self.nodes
2200
2201        for dt_node in self._dt.node_iter():
2202            # Warning: We depend on parent Nodes being created before their
2203            # children. This is guaranteed by node_iter().
2204            if "compatible" in dt_node.props:
2205                compats = dt_node.props["compatible"].to_strings()
2206            else:
2207                compats = []
2208            node = Node(dt_node, self, compats)
2209            node.bus_node = node._bus_node(self._fixed_partitions_no_bus)
2210            node._init_binding()
2211            node._init_regs()
2212            node._init_ranges()
2213
2214            self.nodes.append(node)
2215            self._node2enode[dt_node] = node
2216
2217        for node in self.nodes:
2218            # These depend on all Node objects having been created, because
2219            # they (either always or sometimes) reference other nodes, so we
2220            # run them separately
2221            node._init_props(default_prop_types=self._default_prop_types,
2222                             err_on_deprecated=self._werror)
2223            node._init_interrupts()
2224            node._init_pinctrls()
2225
2226        if self._warn_reg_unit_address_mismatch:
2227            # This warning matches the simple_bus_reg warning in dtc
2228            for node in self.nodes:
2229                # Address mismatch is ok for PCI devices
2230                if (node.regs and node.regs[0].addr != node.unit_addr and
2231                        not node.is_pci_device):
2232                    _LOG.warning("unit address and first address in 'reg' "
2233                                 f"(0x{node.regs[0].addr:x}) don't match for "
2234                                 f"{node.path}")
2235
2236    def _init_luts(self) -> None:
2237        # Initialize node lookup tables (LUTs).
2238
2239        for node in self.nodes:
2240            for label in node.labels:
2241                self.label2node[label] = node
2242
2243            for compat in node.compats:
2244                self.compat2nodes[compat].append(node)
2245
2246                if node.status == "okay":
2247                    self.compat2okay[compat].append(node)
2248
2249                if compat in self.compat2vendor:
2250                    continue
2251
2252                # The regular expression comes from dt-schema.
2253                compat_re = r'^[a-zA-Z][a-zA-Z0-9,+\-._]+$'
2254                if not re.match(compat_re, compat):
2255                    _err(f"node '{node.path}' compatible '{compat}' "
2256                         'must match this regular expression: '
2257                         f"'{compat_re}'")
2258
2259                if ',' in compat and self._vendor_prefixes:
2260                    vendor, model = compat.split(',', 1)
2261                    if vendor in self._vendor_prefixes:
2262                        self.compat2vendor[compat] = self._vendor_prefixes[vendor]
2263                        self.compat2model[compat] = model
2264
2265                    # As an exception, the root node can have whatever
2266                    # compatibles it wants. Other nodes get checked.
2267                    elif node.path != '/':
2268                        if self._werror:
2269                            handler_fn: Any = _err
2270                        else:
2271                            handler_fn = _LOG.warning
2272                        handler_fn(
2273                            f"node '{node.path}' compatible '{compat}' "
2274                            f"has unknown vendor prefix '{vendor}'")
2275
2276
2277        for nodeset in self.scc_order:
2278            node = nodeset[0]
2279            self.dep_ord2node[node.dep_ordinal] = node
2280
2281    def _check(self) -> None:
2282        # Tree-wide checks and warnings.
2283
2284        for binding in self._compat2binding.values():
2285            for spec in binding.prop2specs.values():
2286                if not spec.enum or spec.type != 'string':
2287                    continue
2288
2289                if not spec.enum_tokenizable:
2290                    _LOG.warning(
2291                        f"compatible '{binding.compatible}' "
2292                        f"in binding '{binding.path}' has non-tokenizable enum "
2293                        f"for property '{spec.name}': " +
2294                        ', '.join(repr(x) for x in spec.enum))
2295                elif not spec.enum_upper_tokenizable:
2296                    _LOG.warning(
2297                        f"compatible '{binding.compatible}' "
2298                        f"in binding '{binding.path}' has enum for property "
2299                        f"'{spec.name}' that is only tokenizable "
2300                        'in lowercase: ' +
2301                        ', '.join(repr(x) for x in spec.enum))
2302
2303        # Validate the contents of compatible properties.
2304        for node in self.nodes:
2305            if 'compatible' not in node.props:
2306                continue
2307
2308            compatibles = node.props['compatible'].val
2309
2310            # _check() runs after _init_compat2binding() has called
2311            # _dt_compats(), which already converted every compatible
2312            # property to a list of strings. So we know 'compatibles'
2313            # is a list, but add an assert for future-proofing.
2314            assert isinstance(compatibles, list)
2315
2316            for compat in compatibles:
2317                # This is also just for future-proofing.
2318                assert isinstance(compat, str)
2319
2320
2321def bindings_from_paths(yaml_paths: List[str],
2322                        ignore_errors: bool = False) -> List[Binding]:
2323    """
2324    Get a list of Binding objects from the yaml files 'yaml_paths'.
2325
2326    If 'ignore_errors' is True, YAML files that cause an EDTError when
2327    loaded are ignored. (No other exception types are silenced.)
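
    A minimal sketch (the path is hypothetical):

        bindings = bindings_from_paths(["dts/bindings/vnd,device.yaml"])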
2328    """
2329
2330    ret = []
2331    fname2path = {os.path.basename(path): path for path in yaml_paths}
2332    for path in yaml_paths:
2333        try:
2334            ret.append(Binding(path, fname2path))
2335        except EDTError:
2336            if ignore_errors:
2337                continue
2338            raise
2339
2340    return ret
2341
2342
2343class EDTError(Exception):
2344    "Exception raised for devicetree- and binding-related errors"
2345
2346#
2347# Public global functions
2348#
2349
2350
2351def load_vendor_prefixes_txt(vendor_prefixes: str) -> Dict[str, str]:
2352    """Load a vendor-prefixes.txt file and return a dict
2353    representation mapping a vendor prefix to the vendor name.
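
    For example, a line 'vnd<TAB>Vendor Name' (a hypothetical prefix) results
    in the entry {'vnd': 'Vendor Name'}.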
2354    """
2355    vnd2vendor: Dict[str, str] = {}
2356    with open(vendor_prefixes, 'r', encoding='utf-8') as f:
2357        for line in f:
2358            line = line.strip()
2359
2360            if not line or line.startswith('#'):
2361                # Comment or empty line.
2362                continue
2363
2364            # Other lines should be in this form:
2365            #
2366            # <vnd><TAB><vendor>
2367            vnd_vendor = line.split('\t', 1)
2368            assert len(vnd_vendor) == 2, line
2369            vnd2vendor[vnd_vendor[0]] = vnd_vendor[1]
2370    return vnd2vendor
2371
2372#
2373# Private global functions
2374#
2375
2376
2377def _dt_compats(dt: DT) -> Set[str]:
2378    # Returns a set() with all 'compatible' strings in the devicetree
2379    # represented by dt (a dtlib.DT instance)
2380
2381    return {compat
2382            for node in dt.node_iter()
2383                if "compatible" in node.props
2384                    for compat in node.props["compatible"].to_strings()}
2385
2386
2387def _binding_paths(bindings_dirs: List[str]) -> List[str]:
2388    # Returns a list with the paths to all bindings (.yaml files) in
2389    # 'bindings_dirs'
2390
2391    binding_paths = []
2392
2393    for bindings_dir in bindings_dirs:
2394        for root, _, filenames in os.walk(bindings_dir):
2395            for filename in filenames:
2396                if filename.endswith(".yaml") or filename.endswith(".yml"):
2397                    binding_paths.append(os.path.join(root, filename))
2398
2399    return binding_paths
2400
2401
2402def _binding_inc_error(msg):
2403    # Helper for reporting errors in the !include implementation
2404
2405    raise yaml.constructor.ConstructorError(None, None, "error: " + msg)
2406
2407
2408def _check_include_dict(name: Optional[str],
2409                        allowlist: Optional[List[str]],
2410                        blocklist: Optional[List[str]],
2411                        child_filter: Optional[dict],
2412                        binding_path: Optional[str]) -> None:
2413    # Check that an 'include:' named 'name' with property-allowlist
2414    # 'allowlist', property-blocklist 'blocklist', and
2415    # child-binding filter 'child_filter' has valid structure.
2416
2417    if name is None:
2418        _err(f"'include:' element in {binding_path} "
2419             "should have a 'name' key")
2420
2421    if allowlist is not None and blocklist is not None:
2422        _err(f"'include:' of file '{name}' in {binding_path} "
2423             "should not specify both 'property-allowlist:' "
2424             "and 'property-blocklist:'")
2425
2426    while child_filter is not None:
2427        child_copy = deepcopy(child_filter)
2428        child_allowlist: Optional[List[str]] = \
2429            child_copy.pop('property-allowlist', None)
2430        child_blocklist: Optional[List[str]] = \
2431            child_copy.pop('property-blocklist', None)
2432        next_child_filter: Optional[dict] = \
2433            child_copy.pop('child-binding', None)
2434
2435        if child_copy:
2436            # We've popped out all the valid keys.
2437            _err(f"'include:' of file '{name}' in {binding_path} "
2438                 "should not have these unexpected contents in a "
2439                 f"'child-binding': {child_copy}")
2440
2441        if child_allowlist is not None and child_blocklist is not None:
2442            _err(f"'include:' of file '{name}' in {binding_path} "
2443                 "should not specify both 'property-allowlist:' and "
2444                 "'property-blocklist:' in a 'child-binding:'")
2445
2446        child_filter = next_child_filter
2447
2448
2449def _filter_properties(raw: dict,
2450                       allowlist: Optional[List[str]],
2451                       blocklist: Optional[List[str]],
2452                       child_filter: Optional[dict],
2453                       binding_path: Optional[str]) -> None:
2454    # Destructively modifies 'raw["properties"]' and
2455    # 'raw["child-binding"]', if they exist, according to
2456    # 'allowlist', 'blocklist', and 'child_filter'.
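    #
    # For example (a sketch with hypothetical property names), with
    # allowlist=['foo'], a 'raw' binding whose 'properties:' contains both
    # 'foo' and 'bar' is left with just 'foo'.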
2457
2458    props = raw.get('properties')
2459    _filter_properties_helper(props, allowlist, blocklist, binding_path)
2460
2461    child_binding = raw.get('child-binding')
2462    while child_filter is not None and child_binding is not None:
2463        _filter_properties_helper(child_binding.get('properties'),
2464                                  child_filter.get('property-allowlist'),
2465                                  child_filter.get('property-blocklist'),
2466                                  binding_path)
2467        child_filter = child_filter.get('child-binding')
2468        child_binding = child_binding.get('child-binding')
2469
2470
2471def _filter_properties_helper(props: Optional[dict],
2472                              allowlist: Optional[List[str]],
2473                              blocklist: Optional[List[str]],
2474                              binding_path: Optional[str]) -> None:
2475    if props is None or (allowlist is None and blocklist is None):
2476        return
2477
2478    _check_prop_filter('property-allowlist', allowlist, binding_path)
2479    _check_prop_filter('property-blocklist', blocklist, binding_path)
2480
2481    if allowlist is not None:
2482        allowset = set(allowlist)
2483        to_del = [prop for prop in props if prop not in allowset]
2484    else:
2485        if TYPE_CHECKING:
2486            assert blocklist
2487        blockset = set(blocklist)
2488        to_del = [prop for prop in props if prop in blockset]
2489
2490    for prop in to_del:
2491        del props[prop]
2492
2493
2494def _check_prop_filter(name: str, value: Optional[List[str]],
2495                       binding_path: Optional[str]) -> None:
2496    # Ensure an include: ... property-allowlist or property-blocklist
2497    # is a list.
2498
2499    if value is None:
2500        return
2501
2502    if not isinstance(value, list):
2503        _err(f"'{name}' value {value} in {binding_path} should be a list")
2504
2505
2506def _merge_props(to_dict: dict,
2507                 from_dict: dict,
2508                 parent: Optional[str],
2509                 binding_path: Optional[str],
2510                 check_required: bool = False):
2511    # Recursively merges 'from_dict' into 'to_dict', to implement 'include:'.
2512    #
2513    # If 'from_dict' and 'to_dict' contain a 'required:' key for the same
2514    # property, then the values are ORed together.
2515    #
2516    # If 'check_required' is True, then an error is raised if 'from_dict' has
2517    # 'required: true' while 'to_dict' has 'required: false'. This prevents
2518    # bindings from "downgrading" requirements from bindings they include,
2519    # which might help keep bindings well-organized.
2520    #
2521    # It's an error for most other keys to appear in both 'from_dict' and
2522    # 'to_dict'. When it's not an error, the value in 'to_dict' takes
2523    # precedence.
2524    #
2525    # 'parent' is the name of the parent key containing 'to_dict' and
2526    # 'from_dict', and 'binding_path' is the path to the top-level binding.
2527    # These are used to generate errors for sketchy property overwrites.
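    #
    # For example (a minimal sketch, with the default check_required=False),
    # merging
    #
    #     {"properties": {"foo": {"required": True}}}
    #
    # into
    #
    #     {"properties": {"foo": {"required": False}}}
    #
    # leaves 'required' for 'foo' set to True in 'to_dict', since
    # 'required: true' takes precedence.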
2528
2529    for prop in from_dict:
2530        if isinstance(to_dict.get(prop), dict) and \
2531           isinstance(from_dict[prop], dict):
2532            _merge_props(to_dict[prop], from_dict[prop], prop, binding_path,
2533                         check_required)
2534        elif prop not in to_dict:
2535            to_dict[prop] = from_dict[prop]
2536        elif _bad_overwrite(to_dict, from_dict, prop, check_required):
2537            _err(f"{binding_path} (in '{parent}'): '{prop}' "
2538                 f"from included file overwritten ('{from_dict[prop]}' "
2539                 f"replaced with '{to_dict[prop]}')")
2540        elif prop == "required":
2541            # Need a separate check here, because this code runs before
2542            # Binding._check()
2543            if not (isinstance(from_dict["required"], bool) and
2544                    isinstance(to_dict["required"], bool)):
2545                _err(f"malformed 'required:' setting for '{parent}' in "
2546                     f"'properties' in {binding_path}, expected true/false")
2547
2548            # 'required: true' takes precedence
2549            to_dict["required"] = to_dict["required"] or from_dict["required"]
2550
2551
2552def _bad_overwrite(to_dict: dict, from_dict: dict, prop: str,
2553                   check_required: bool) -> bool:
2554    # _merge_props() helper. Returns True in cases where it's bad that
2555    # to_dict[prop] takes precedence over from_dict[prop].
2556
2557    if to_dict[prop] == from_dict[prop]:
2558        return False
2559
2560    # These are overridden deliberately
2561    if prop in {"title", "description", "compatible"}:
2562        return False
2563
2564    if prop == "required":
2565        if not check_required:
2566            return False
2567        return from_dict[prop] and not to_dict[prop]
2568
2569    return True
2570
2571
2572def _binding_include(loader, node):
2573    # Implements !include, for backwards compatibility. '!include [foo, bar]'
2574    # just becomes [foo, bar].
2575
2576    if isinstance(node, yaml.ScalarNode):
2577        # !include foo.yaml
2578        return [loader.construct_scalar(node)]
2579
2580    if isinstance(node, yaml.SequenceNode):
2581        # !include [foo.yaml, bar.yaml]
2582        return loader.construct_sequence(node)
2583
2584    _binding_inc_error("unrecognised node type in !include statement")
2585
2586
2587def _check_prop_by_type(prop_name: str,
2588                        options: dict,
2589                        binding_path: Optional[str]) -> None:
    # Binding._check_properties() helper. Checks 'type:', 'default:',
    # 'const:' and 'specifier-space:' for the property named 'prop_name'
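    #
    # For example (a sketch for a hypothetical property), an 'options' dict
    # like {"type": "int", "default": 10} passes these checks, while
    # {"type": "boolean", "default": True} errors out, since 'default:'
    # can't be combined with 'type: boolean'.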
2592
2593    prop_type = options.get("type")
2594    default = options.get("default")
2595    const = options.get("const")
2596
2597    if prop_type is None:
2598        _err(f"missing 'type:' for '{prop_name}' in 'properties' in "
2599             f"{binding_path}")
2600
2601    ok_types = {"boolean", "int", "array", "uint8-array", "string",
2602                "string-array", "phandle", "phandles", "phandle-array",
2603                "path", "compound"}
2604
2605    if prop_type not in ok_types:
2606        _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2607             f"has unknown type '{prop_type}', expected one of " +
2608             ", ".join(ok_types))
2609
2610    if "specifier-space" in options and prop_type != "phandle-array":
2611        _err(f"'specifier-space' in 'properties: {prop_name}' "
2612             f"has type '{prop_type}', expected 'phandle-array'")
2613
2614    if prop_type == "phandle-array":
        if not prop_name.endswith("s") and "specifier-space" not in options:
            _err(f"'{prop_name}' in 'properties:' in {binding_path} "
                 "has type 'phandle-array' and its name does not end in 's', "
                 "but no 'specifier-space' was provided.")
2619
2620    # If you change const_types, be sure to update the type annotation
2621    # for PropertySpec.const.
2622    const_types = {"int", "array", "uint8-array", "string", "string-array"}
2623    if const and prop_type not in const_types:
2624        _err(f"const in {binding_path} for property '{prop_name}' "
2625             f"has type '{prop_type}', expected one of " +
2626             ", ".join(const_types))
2627
2628    # Check default
2629
2630    if default is None:
2631        return
2632
2633    if prop_type in {"boolean", "compound", "phandle", "phandles",
2634                     "phandle-array", "path"}:
2635        _err("'default:' can't be combined with "
2636             f"'type: {prop_type}' for '{prop_name}' in "
2637             f"'properties:' in {binding_path}")
2638
2639    def ok_default() -> bool:
2640        # Returns True if 'default' is an okay default for the property's type.
2641        # If you change this, be sure to update the type annotation for
2642        # PropertySpec.default.
2643
2644        if prop_type == "int" and isinstance(default, int) or \
2645           prop_type == "string" and isinstance(default, str):
2646            return True
2647
2648        # array, uint8-array, or string-array
2649
2650        if not isinstance(default, list):
2651            return False
2652
2653        if prop_type == "array" and \
2654           all(isinstance(val, int) for val in default):
2655            return True
2656
2657        if prop_type == "uint8-array" and \
2658           all(isinstance(val, int) and 0 <= val <= 255 for val in default):
2659            return True
2660
2661        # string-array
2662        return all(isinstance(val, str) for val in default)
2663
2664    if not ok_default():
2665        _err(f"'default: {default}' is invalid for '{prop_name}' "
2666             f"in 'properties:' in {binding_path}, "
2667             f"which has type {prop_type}")
2668
2669
2670def _translate(addr: int, node: dtlib_Node) -> int:
2671    # Recursively translates 'addr' on 'node' to the address space(s) of its
2672    # parent(s), by looking at 'ranges' properties. Returns the translated
2673    # address.
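    #
    # For example (a sketch with hypothetical values), if the parent of
    # 'node' has
    #
    #     ranges = <0x0 0x40000000 0x1000>;
    #
    # with one cell each for the child address, parent address and length,
    # then an 'addr' of 0x100 translates to 0x40000000 + 0x100 = 0x40000100
    # (and is then translated further up the tree, if needed).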
2674
2675    if not node.parent or "ranges" not in node.parent.props:
2676        # No translation
2677        return addr
2678
2679    if not node.parent.props["ranges"].value:
2680        # DT spec.: "If the property is defined with an <empty> value, it
2681        # specifies that the parent and child address space is identical, and
2682        # no address translation is required."
2683        #
2684        # Treat this the same as a 'range' that explicitly does a one-to-one
2685        # mapping, as opposed to there not being any translation.
2686        return _translate(addr, node.parent)
2687
2688    # Gives the size of each component in a translation 3-tuple in 'ranges'
2689    child_address_cells = _address_cells(node)
2690    parent_address_cells = _address_cells(node.parent)
2691    child_size_cells = _size_cells(node)
2692
2693    # Number of cells for one translation 3-tuple in 'ranges'
2694    entry_cells = child_address_cells + parent_address_cells + child_size_cells
2695
2696    for raw_range in _slice(node.parent, "ranges", 4*entry_cells,
2697                            f"4*(<#address-cells> (= {child_address_cells}) + "
2698                            "<#address-cells for parent> "
2699                            f"(= {parent_address_cells}) + "
2700                            f"<#size-cells> (= {child_size_cells}))"):
2701        child_addr = to_num(raw_range[:4*child_address_cells])
2702        raw_range = raw_range[4*child_address_cells:]
2703
2704        parent_addr = to_num(raw_range[:4*parent_address_cells])
2705        raw_range = raw_range[4*parent_address_cells:]
2706
2707        child_len = to_num(raw_range)
2708
2709        if child_addr <= addr < child_addr + child_len:
2710            # 'addr' is within range of a translation in 'ranges'. Recursively
2711            # translate it and return the result.
2712            return _translate(parent_addr + addr - child_addr, node.parent)
2713
2714    # 'addr' is not within range of any translation in 'ranges'
2715    return addr
2716
2717
2718def _add_names(node: dtlib_Node, names_ident: str, objs: Any) -> None:
2719    # Helper for registering names from <foo>-names properties.
2720    #
2721    # node:
2722    #   Node which has a property that might need named elements.
2723    #
    # names_ident:
2725    #   The <foo> part of <foo>-names, e.g. "reg" for "reg-names"
2726    #
2727    # objs:
2728    #   list of objects whose .name field should be set
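    #
    # For example (a sketch), with 'reg-names = "mem", "ctrl";' on 'node' and
    # a names_ident of "reg", the first object in 'objs' gets the name "mem"
    # and the second gets "ctrl".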
2729
2730    full_names_ident = names_ident + "-names"
2731
2732    if full_names_ident in node.props:
2733        names = node.props[full_names_ident].to_strings()
2734        if len(names) != len(objs):
2735            _err(f"{full_names_ident} property in {node.path} "
2736                 f"in {node.dt.filename} has {len(names)} strings, "
2737                 f"expected {len(objs)} strings")
2738
2739        for obj, name in zip(objs, names):
2740            if obj is None:
2741                continue
2742            obj.name = name
2743    else:
2744        for obj in objs:
2745            if obj is not None:
2746                obj.name = None
2747
2748
2749def _interrupt_parent(start_node: dtlib_Node) -> dtlib_Node:
    # Returns the node pointed at by the closest 'interrupt-parent', searching
    # 'start_node' and its parents. As of writing, this behavior isn't
    # specified in the DT spec., but seems to match what some .dts files
    # expect.
2753
2754    node: Optional[dtlib_Node] = start_node
2755
2756    while node:
2757        if "interrupt-parent" in node.props:
2758            return node.props["interrupt-parent"].to_node()
2759        node = node.parent
2760
2761    _err(f"{start_node!r} has an 'interrupts' property, but neither the node "
2762         f"nor any of its parents has an 'interrupt-parent' property")
2763
2764
2765def _interrupts(node: dtlib_Node) -> List[Tuple[dtlib_Node, bytes]]:
2766    # Returns a list of (<controller>, <data>) tuples, with one tuple per
2767    # interrupt generated by 'node'. <controller> is the destination of the
2768    # interrupt (possibly after mapping through an 'interrupt-map'), and <data>
2769    # the data associated with the interrupt (as a 'bytes' object).
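    #
    # For example (a sketch with hypothetical interrupt controllers),
    #
    #     interrupts-extended = <&intc1 5 &intc2 7 1>;
    #
    # produces one (controller, data) tuple per entry, where the data holds
    # the cells that follow each phandle, possibly mapped through any
    # 'interrupt-map'.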
2770
2771    # Takes precedence over 'interrupts' if both are present
2772    if "interrupts-extended" in node.props:
2773        prop = node.props["interrupts-extended"]
2774
2775        ret: List[Tuple[dtlib_Node, bytes]] = []
2776        for entry in _phandle_val_list(prop, "interrupt"):
2777            if entry is None:
2778                _err(f"node '{node.path}' interrupts-extended property "
2779                     "has an empty element")
2780            iparent, spec = entry
2781            ret.append(_map_interrupt(node, iparent, spec))
2782        return ret
2783
2784    if "interrupts" in node.props:
2785        # Treat 'interrupts' as a special case of 'interrupts-extended', with
2786        # the same interrupt parent for all interrupts
2787
2788        iparent = _interrupt_parent(node)
2789        interrupt_cells = _interrupt_cells(iparent)
2790
2791        return [_map_interrupt(node, iparent, raw)
2792                for raw in _slice(node, "interrupts", 4*interrupt_cells,
2793                                  "4*<#interrupt-cells>")]
2794
2795    return []
2796
2797
2798def _map_interrupt(
2799        child: dtlib_Node,
2800        parent: dtlib_Node,
2801        child_spec: bytes
2802) -> Tuple[dtlib_Node, bytes]:
2803    # Translates an interrupt headed from 'child' to 'parent' with data
2804    # 'child_spec' through any 'interrupt-map' properties. Returns a
2805    # (<controller>, <data>) tuple with the final destination after mapping.

    if "interrupt-controller" in parent.props:
        return (parent, child_spec)

    def own_address_cells(node):
        # Used for parents pointed at by 'interrupt-map'. We can't use
        # _address_cells(), because it's the #address-cells property on 'node'
        # itself that matters.

        address_cells = node.props.get("#address-cells")
        if not address_cells:
            _err(f"missing #address-cells on {node!r} "
                 "(while handling interrupt-map)")
        return address_cells.to_num()

    def spec_len_fn(node):
        # Can't use _address_cells() here, because it's the #address-cells
        # property on 'node' itself that matters
        return own_address_cells(node) + _interrupt_cells(node)

    parent, raw_spec = _map(
        "interrupt", child, parent, _raw_unit_addr(child) + child_spec,
        spec_len_fn, require_controller=True)

    # Strip the parent unit address part, if any
    return (parent, raw_spec[4*own_address_cells(parent):])


def _map_phandle_array_entry(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        basename: str
) -> Tuple[dtlib_Node, bytes]:
    # Returns a (<controller>, <data>) tuple with the final destination after
    # mapping through any '<basename>-map' (e.g. gpio-map) properties. See
    # _map_interrupt().
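    #
    # Illustrative (hypothetical) example: for
    #
    #   foo-gpios = <&connector 3 GPIO_ACTIVE_LOW>;
    #
    # where 'connector' is a nexus node with a 'gpio-map', this resolves the
    # entry to the actual GPIO controller behind the connector.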

    def spec_len_fn(node):
        prop_name = f"#{basename}-cells"
        if prop_name not in node.props:
            _err(f"expected '{prop_name}' property on {node!r} "
                 f"(referenced by {child!r})")
        return node.props[prop_name].to_num()

    # Do not require <prefix>-controller for anything but interrupts for now
    return _map(basename, child, parent, child_spec, spec_len_fn,
                require_controller=False)


def _map(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        spec_len_fn: Callable[[dtlib_Node], int],
        require_controller: bool
) -> Tuple[dtlib_Node, bytes]:
    # Common code for mapping through <prefix>-map properties, e.g.
    # interrupt-map and gpio-map.
    #
    # prefix:
    #   The prefix, e.g. "interrupt" or "gpio"
    #
    # child:
    #   The "sender", e.g. the node with 'interrupts = <...>'
    #
    # parent:
    #   The "receiver", e.g. a node with 'interrupt-map = <...>' or
    #   'interrupt-controller' (no mapping)
    #
    # child_spec:
    #   The data associated with the interrupt/GPIO/etc., as a 'bytes' object,
    #   e.g. <1 2> for 'foo-gpios = <&gpio1 1 2>'.
    #
    # spec_len_fn:
    #   Function called on a parent specified in a *-map property to get the
    #   length of the parent specifier (data after phandle in *-map), in cells
    #
    # require_controller:
    #   If True, the final controller node after mapping is required to have
    #   a <prefix>-controller property.
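    #
    # Illustrative interrupt-map row layout (hypothetical, for reference):
    #
    #   interrupt-map = <child-unit-addr child-spec
    #                    &map-parent parent-unit-addr parent-spec>;
    #
    # Each row is matched against the (masked) child data, and the matching
    # row's parent data is passed on to 'map-parent', possibly through further
    # *-map properties.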

    map_prop = parent.props.get(prefix + "-map")
    if not map_prop:
        if require_controller and prefix + "-controller" not in parent.props:
            _err(f"expected '{prefix}-controller' property on {parent!r} "
                 f"(referenced by {child!r})")

        # No mapping
        return (parent, child_spec)

    masked_child_spec = _mask(prefix, child, parent, child_spec)

    raw = map_prop.value
    while raw:
        if len(raw) < len(child_spec):
            _err(f"bad value for {map_prop!r}, missing/truncated child data")
        child_spec_entry = raw[:len(child_spec)]
        raw = raw[len(child_spec):]

        if len(raw) < 4:
            _err(f"bad value for {map_prop!r}, missing/truncated phandle")
        phandle = to_num(raw[:4])
        raw = raw[4:]

        # Parent specified in *-map
        map_parent = parent.dt.phandle2node.get(phandle)
        if not map_parent:
            _err(f"bad phandle ({phandle}) in {map_prop!r}")

        map_parent_spec_len = 4*spec_len_fn(map_parent)
        if len(raw) < map_parent_spec_len:
            _err(f"bad value for {map_prop!r}, missing/truncated parent data")
        parent_spec = raw[:map_parent_spec_len]
        raw = raw[map_parent_spec_len:]

        # Got one *-map row. Check if it matches the child data.
        if child_spec_entry == masked_child_spec:
            # Handle *-map-pass-thru
            parent_spec = _pass_thru(
                prefix, child, parent, child_spec, parent_spec)

            # Found match. Recursively map and return it.
            return _map(prefix, parent, map_parent, parent_spec, spec_len_fn,
                        require_controller)

    _err(f"child specifier for {child!r} ({child_spec!r}) "
         f"does not appear in {map_prop!r}")


def _mask(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-mask properties, e.g.
    # interrupt-map-mask. See _map() for the parameters.
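    #
    # Illustrative (hypothetical) example: with
    #
    #   interrupt-map-mask = <0x0 0x7>;
    #
    # only the low three bits of the second cell of the child data are kept
    # before the data is compared against the interrupt-map rows.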

    mask_prop = parent.props.get(prefix + "-map-mask")
    if not mask_prop:
        # No mask
        return child_spec

    mask = mask_prop.value
    if len(mask) != len(child_spec):
        _err(f"{child!r}: expected '{prefix}-map-mask' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(mask)} bytes")

    return _and(child_spec, mask)


def _pass_thru(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        parent_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-pass-thru properties, e.g.
    # interrupt-map-pass-thru.
    #
    # parent_spec:
    #   The parent data from the matched entry in the <prefix>-map property
    #
    # See _map() for the other parameters.
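    #
    # Illustrative (hypothetical) example: with
    #
    #   interrupt-map-pass-thru = <0x0 0xff>;
    #
    # the low byte of the second cell (e.g. interrupt flags) is taken from the
    # child data, and everything else from the parent data in the matched row.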

    pass_thru_prop = parent.props.get(prefix + "-map-pass-thru")
    if not pass_thru_prop:
        # No pass-thru
        return parent_spec

    pass_thru = pass_thru_prop.value
    if len(pass_thru) != len(child_spec):
        _err(f"{child!r}: expected '{prefix}-map-pass-thru' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(pass_thru)} bytes")

    res = _or(_and(child_spec, pass_thru),
              _and(parent_spec, _not(pass_thru)))

    # Truncate to length of parent spec.
    return res[-len(parent_spec):]


def _raw_unit_addr(node: dtlib_Node) -> bytes:
    # _map_interrupt() helper. Returns the unit address (derived from 'reg' and
    # #address-cells) as a raw 'bytes'
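    #
    # Illustrative (hypothetical) example: with '#address-cells = <1>;' on the
    # parent and 'reg = <0x40001000 0x1000>;', this returns the first 4 bytes
    # of 'reg' (the 0x40001000 address part).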

    if 'reg' not in node.props:
        _err(f"{node!r} lacks 'reg' property "
             "(needed for 'interrupt-map' unit address lookup)")

    addr_len = 4*_address_cells(node)

    if len(node.props['reg'].value) < addr_len:
        _err(f"{node!r} has too short 'reg' property "
             "(while doing 'interrupt-map' unit address lookup)")

    return node.props['reg'].value[:addr_len]


def _and(b1: bytes, b2: bytes) -> bytes:
    # Returns the bitwise AND of the two 'bytes' objects b1 and b2. Pads
    # with ones on the left if the lengths are not equal.

    # Pad on the left, to equal length
    maxlen = max(len(b1), len(b2))
    return bytes(x & y for x, y in zip(b1.rjust(maxlen, b'\xff'),
                                       b2.rjust(maxlen, b'\xff')))


def _or(b1: bytes, b2: bytes) -> bytes:
    # Returns the bitwise OR of the two 'bytes' objects b1 and b2. Pads with
    # zeros on the left if the lengths are not equal.

    # Pad on the left, to equal length
    maxlen = max(len(b1), len(b2))
    return bytes(x | y for x, y in zip(b1.rjust(maxlen, b'\x00'),
                                       b2.rjust(maxlen, b'\x00')))


def _not(b: bytes) -> bytes:
    # Returns the bitwise not of the 'bytes' object 'b'

    # ANDing with 0xFF avoids negative numbers
    return bytes(~x & 0xFF for x in b)
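

# Illustrative examples for the three bitwise helpers above (values are
# hypothetical):
#
#   _and(b'\x0f\x0f', b'\xf1') == b'\x0f\x01'   # shorter arg padded with 0xff
#   _or(b'\x0f\x0f', b'\xf1')  == b'\x0f\xff'   # shorter arg padded with 0x00
#   _not(b'\x0f')              == b'\xf0'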


def _phandle_val_list(
        prop: dtlib_Property,
        n_cells_name: str
) -> List[Optional[Tuple[dtlib_Node, bytes]]]:
    # Parses a '<phandle> <value> <phandle> <value> ...' value. The number of
    # cells that make up each <value> is derived from the node pointed at by
    # the preceding <phandle>.
    #
    # prop:
    #   dtlib.Property with value to parse
    #
    # n_cells_name:
    #   The <name> part of the #<name>-cells property to look for on the nodes
    #   the phandles point to, e.g. "gpio" for #gpio-cells.
    #
    # Each tuple in the return value is a (<node>, <value>) pair, where <node>
    # is the node pointed at by <phandle>. If <phandle> does not refer
    # to a node, the entire list element is None.
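    #
    # Illustrative (hypothetical) example: for
    #
    #   pwms = <&pwm0 1 1000>, <&pwm1 2 2000>;
    #
    # with '#pwm-cells = <2>;' on both controllers, this returns
    # [(<pwm0 node>, <8 bytes>), (<pwm1 node>, <8 bytes>)].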

    full_n_cells_name = f"#{n_cells_name}-cells"

    res: List[Optional[Tuple[dtlib_Node, bytes]]] = []

    raw = prop.value
    while raw:
        if len(raw) < 4:
            # Not enough room for phandle
            _err("bad value for " + repr(prop))
        phandle = to_num(raw[:4])
        raw = raw[4:]

        node = prop.node.dt.phandle2node.get(phandle)
        if not node:
            # Unspecified phandle-array element. This is valid; a 0
            # phandle value followed by no cells is an empty element.
            res.append(None)
            continue

        if full_n_cells_name not in node.props:
            _err(f"{node!r} lacks {full_n_cells_name}")

        n_cells = node.props[full_n_cells_name].to_num()
        if len(raw) < 4*n_cells:
            _err("missing data after phandle in " + repr(prop))

        res.append((node, raw[:4*n_cells]))
        raw = raw[4*n_cells:]

    return res


def _address_cells(node: dtlib_Node) -> int:
    # Returns the #address-cells setting for 'node', giving the number of <u32>
    # cells used to encode the address in the 'reg' property
    if TYPE_CHECKING:
        assert node.parent

    if "#address-cells" in node.parent.props:
        return node.parent.props["#address-cells"].to_num()
    return 2  # Default value per DT spec.


def _size_cells(node: dtlib_Node) -> int:
    # Returns the #size-cells setting for 'node', giving the number of <u32>
    # cells used to encode the size in the 'reg' property
    if TYPE_CHECKING:
        assert node.parent

    if "#size-cells" in node.parent.props:
        return node.parent.props["#size-cells"].to_num()
    return 1  # Default value per DT spec.
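

# Illustrative (hypothetical) example for the two helpers above: for a child
# of a node with
#
#   #address-cells = <1>;
#   #size-cells = <1>;
#
# both helpers return 1, so each 'reg' entry is 4 address bytes followed by
# 4 size bytes.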


def _interrupt_cells(node: dtlib_Node) -> int:
    # Returns the #interrupt-cells property value on 'node', erroring out if
    # 'node' has no #interrupt-cells property

    if "#interrupt-cells" not in node.props:
        _err(f"{node!r} lacks #interrupt-cells")
    return node.props["#interrupt-cells"].to_num()


def _slice(node: dtlib_Node,
           prop_name: str,
           size: int,
           size_hint: str) -> List[bytes]:
    return _slice_helper(node, prop_name, size, size_hint, EDTError)


def _check_dt(dt: DT) -> None:
    # Does devicetree sanity checks. dtlib is meant to be general and
    # anything-goes except for very special properties like phandle, but in
    # edtlib we can be pickier.

    # Check that 'status' has one of the values given in the devicetree spec.

    # Accept "ok" for backwards compatibility
    ok_status = {"ok", "okay", "disabled", "reserved", "fail", "fail-sss"}

    for node in dt.node_iter():
        if "status" in node.props:
            try:
                status_val = node.props["status"].to_string()
            except DTError as e:
                # The error message gives the path
                _err(str(e))

            if status_val not in ok_status:
                _err(f"unknown 'status' value \"{status_val}\" in {node.path} "
                     f"in {node.dt.filename}, expected one of " +
                     ", ".join(ok_status) +
                     " (see the devicetree specification)")

        ranges_prop = node.props.get("ranges")
        if ranges_prop:
            if ranges_prop.type not in (Type.EMPTY, Type.NUMS):
                _err(f"expected 'ranges = < ... >;' in {node.path} in "
                     f"{node.dt.filename}, not '{ranges_prop}' "
                     "(see the devicetree specification)")


def _err(msg) -> NoReturn:
    raise EDTError(msg)

# Logging object
_LOG = logging.getLogger(__name__)

# Regular expression for non-alphanumeric-or-underscore characters.
_NOT_ALPHANUM_OR_UNDERSCORE = re.compile(r'\W', re.ASCII)


def str_as_token(val: str) -> str:
    """Return a canonical representation of a string as a C token.

    This converts special characters in 'val' to underscores, and
    returns the result."""

    return re.sub(_NOT_ALPHANUM_OR_UNDERSCORE, '_', val)
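

# Illustrative example: str_as_token("zephyr,i2c-device") returns
# "zephyr_i2c_device" (the comma and hyphen become underscores).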


# Custom PyYAML binding loader class to avoid modifying yaml.Loader directly,
# which could interfere with YAML loading in clients
class _BindingLoader(Loader):
    pass


# Add legacy '!include foo.yaml' handling
_BindingLoader.add_constructor("!include", _binding_include)

#
# "Default" binding for properties which are defined by the spec.
#
# Zephyr: do not change the _DEFAULT_PROP_TYPES keys without
# updating the documentation for the DT_PROP() macro in
# include/devicetree.h.
#

_DEFAULT_PROP_TYPES: Dict[str, str] = {
    "compatible": "string-array",
    "status": "string",
    "ranges": "compound",  # NUMS or EMPTY
    "reg": "array",
    "reg-names": "string-array",
    "label": "string",
    "interrupts": "array",
    "interrupts-extended": "compound",
    "interrupt-names": "string-array",
    "interrupt-controller": "boolean",
}

_STATUS_ENUM: List[str] = "ok okay disabled reserved fail fail-sss".split()

def _raw_default_property_for(
        name: str
) -> Dict[str, Union[str, bool, List[str]]]:
    ret: Dict[str, Union[str, bool, List[str]]] = {
        'type': _DEFAULT_PROP_TYPES[name],
        'required': False,
    }
    if name == 'status':
        ret['enum'] = _STATUS_ENUM
    return ret
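
# Illustrative example: _raw_default_property_for("status") returns
#
#   {'type': 'string', 'required': False, 'enum': _STATUS_ENUM}
#
# i.e. the raw dict form used to build the default binding below.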

_DEFAULT_PROP_BINDING: Binding = Binding(
    None, {},
    raw={
        'properties': {
            name: _raw_default_property_for(name)
            for name in _DEFAULT_PROP_TYPES
        },
    },
    require_compatible=False, require_description=False,
)

_DEFAULT_PROP_SPECS: Dict[str, PropertySpec] = {
    name: PropertySpec(name, _DEFAULT_PROP_BINDING)
    for name in _DEFAULT_PROP_TYPES
}
