1# Copyright (c) 2019 Nordic Semiconductor ASA
2# Copyright (c) 2019 Linaro Limited
3# SPDX-License-Identifier: BSD-3-Clause
4
5# Tip: You can view just the documentation with 'pydoc3 devicetree.edtlib'
6
7"""
8Library for working with devicetrees at a higher level compared to dtlib. Like
9dtlib, this library presents a tree of devicetree nodes, but the nodes are
10augmented with information from bindings and include some interpretation of
11properties. Some of this interpretation is based on conventions established
12by the Linux kernel, so the Documentation/devicetree/bindings in the Linux
13source code is sometimes good reference material.
14
15Bindings are YAML files that describe devicetree nodes. Devicetree
16nodes are usually mapped to bindings via their 'compatible = "..."' property,
17but a binding can also come from a 'child-binding:' key in the binding for the
18parent devicetree node.
19
20Each devicetree node (dtlib.Node) gets a corresponding edtlib.Node instance,
21which has all the information related to the node.
22
23The top-level entry points for the library are the EDT and Binding classes.
24See their constructor docstrings for details. There is also a
25bindings_from_paths() helper function.
26"""
27
28# NOTE: tests/test_edtlib.py is the test suite for this library.
29
30# Implementation notes
31# --------------------
32#
33# A '_' prefix on an identifier in Python is a convention for marking it private.
34# Please do not access private things. Instead, think of what API you need, and
35# add it.
36#
37# This module is not meant to have any global state. It should be possible to
38# create several EDT objects with independent binding paths and flags. If you
39# need to add a configuration parameter or the like, store it in the EDT
40# instance, and initialize it e.g. with a constructor argument.
41#
42# This library is layered on top of dtlib, and is not meant to expose it to
43# clients. This keeps the header generation script simple.
44#
45# General biased advice:
46#
47# - Consider using @property for APIs that don't need parameters. It makes
48#   functions look like attributes, which is less awkward in clients, and makes
49#   it easy to switch back and forth between variables and functions.
50#
51# - Think about the data type of the thing you're exposing. Exposing something
52#   as e.g. a list or a dictionary is often nicer and more flexible than adding
53#   a function.
54#
55# - Avoid get_*() prefixes on functions. Name them after the thing they return
56#   instead. This often makes the code read more naturally in callers.
57#
58#   Also, consider using @property instead of get_*().
59#
60# - Don't expose dtlib stuff directly.
61#
62# - Add documentation for any new APIs you add.
63#
64#   The convention here is that docstrings (quoted strings) are used for public
65#   APIs, and "doc comments" for internal functions.
66#
67#   @properties are documented in the class docstring, as if they were
68#   variables. See the existing @properties for a template.
69
70from collections import defaultdict
71from copy import deepcopy
72from dataclasses import dataclass
73from typing import Any, Callable, Dict, Iterable, List, NoReturn, \
74    Optional, Set, TYPE_CHECKING, Tuple, Union
75import logging
76import os
77import re
78
79import yaml
80try:
81    # Use the C LibYAML parser if available, rather than the Python parser.
82    # This makes e.g. gen_defines.py more than twice as fast.
83    from yaml import CLoader as Loader
84except ImportError:
85    from yaml import Loader     # type: ignore
86
87from devicetree.dtlib import DT, DTError, to_num, to_nums, Type
88from devicetree.dtlib import Node as dtlib_Node
89from devicetree.dtlib import Property as dtlib_Property
90from devicetree.grutils import Graph
91from devicetree._private import _slice_helper
92
93#
94# Public classes
95#
96
97
class Binding:
    """
    Represents a parsed binding.

    These attributes are available on Binding objects:

    path:
      The absolute path to the file defining the binding.

    description:
      The free-form description of the binding, or None.

    compatible:
      The compatible string the binding matches.

      This may be None. For example, it's None when the Binding is inferred
      from node properties. It can also be None for Binding objects created
      using 'child-binding:' with no compatible.

    prop2specs:
      A dict mapping property names to PropertySpec objects
      describing those properties' values.

    specifier2cells:
      A dict that maps specifier space names (like "gpio",
      "clock", "pwm", etc.) to lists of cell names.

      For example, if the binding YAML contains 'pin' and 'flags' cell names
      for the 'gpio' specifier space, like this:

          gpio-cells:
          - pin
          - flags

      Then the Binding object will have a 'specifier2cells' attribute mapping
      "gpio" to ["pin", "flags"]. A missing key should be interpreted as zero
      cells.

    raw:
      The binding as an object parsed from YAML.

    bus:
      If nodes with this binding's 'compatible' describe a bus, a string
      describing the bus type (like "i2c") or a list describing supported
      protocols (like ["i3c", "i2c"]). None otherwise.

      Note that this is the raw value from the binding where it can be
      a string or a list. Use "buses" instead unless you need the raw
      value, where "buses" is always a list.

    buses:
      Derived property from 'bus' where 'buses' is a list of bus(es),
      for example, ["i2c"] or ["i3c", "i2c"]. Or an empty list if there is
      no 'bus:' in this binding.

    on_bus:
      If nodes with this binding's 'compatible' appear on a bus, a string
      describing the bus type (like "i2c"). None otherwise.

    child_binding:
      If this binding describes the properties of child nodes, then
      this is a Binding object for those children; it is None otherwise.
      A Binding object's 'child_binding.child_binding' is not None if there
      are multiple levels of 'child-binding' descriptions in the binding.
    """

    def __init__(self, path: Optional[str], fname2path: Dict[str, str],
                 raw: Any = None, require_compatible: bool = True,
                 require_description: bool = True):
        """
        Binding constructor.

        path:
          Path to binding YAML file. May be None.

        fname2path:
          Map from include files to their absolute paths. Must
          not be None, but may be empty.

        raw:
          Optional raw content in the binding.
          This does not have to have any "include:" lines resolved.
          May be left out, in which case 'path' is opened and read.
          This can be used to resolve child bindings, for example.

        require_compatible:
          If True, it is an error if the binding does not contain a
          "compatible:" line. If False, a missing "compatible:" is
          not an error. Either way, "compatible:" must be a string
          if it is present in the binding.

        require_description:
          If True, it is an error if the binding does not contain a
          "description:" line. If False, a missing "description:" is
          not an error. Either way, "description:" must be a string
          if it is present in the binding.
        """
        self.path: Optional[str] = path
        self._fname2path: Dict[str, str] = fname2path

        if raw is None:
            if path is None:
                _err("you must provide either a 'path' or a 'raw' argument")
            with open(path, encoding="utf-8") as f:
                raw = yaml.load(f, Loader=_BindingLoader)

        # Merge any included files into self.raw. This also pulls in
        # inherited child binding definitions, so it has to be done
        # before initializing those.
        self.raw: dict = self._merge_includes(raw, self.path)

        # Recursively initialize any child bindings. These don't
        # require a 'compatible' or 'description' to be well defined,
        # but they must be dicts.
        if "child-binding" in raw:
            if not isinstance(raw["child-binding"], dict):
                _err(f"malformed 'child-binding:' in {self.path}, "
                     "expected a binding (dictionary with keys/values)")
            self.child_binding: Optional['Binding'] = Binding(
                path, fname2path,
                raw=raw["child-binding"],
                require_compatible=False,
                require_description=False)
        else:
            self.child_binding = None

        # Make sure this is a well defined object.
        self._check(require_compatible, require_description)

        # Initialize look up tables.
        self.prop2specs: Dict[str, 'PropertySpec'] = {}
        for prop_name in self.raw.get("properties", {}).keys():
            self.prop2specs[prop_name] = PropertySpec(prop_name, self)
        self.specifier2cells: Dict[str, List[str]] = {}
        for key, val in self.raw.items():
            if key.endswith("-cells"):
                self.specifier2cells[key[:-len("-cells")]] = val

    def __repr__(self) -> str:
        if self.compatible:
            compat = f" for compatible '{self.compatible}'"
        else:
            compat = ""
        basename = os.path.basename(self.path or "")
        return f"<Binding {basename}" + compat + ">"

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('description')

    @property
    def compatible(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('compatible')

    @property
    def bus(self) -> Union[None, str, List[str]]:
        "See the class docstring"
        return self.raw.get('bus')

    @property
    def buses(self) -> List[str]:
        "See the class docstring"
        # self._buses is only initialized by _check() when a 'bus:' key
        # is present, so guard on the raw value first.
        if self.raw.get('bus') is not None:
            return self._buses
        else:
            return []

    @property
    def on_bus(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('on-bus')

    def _merge_includes(self, raw: dict, binding_path: Optional[str]) -> dict:
        # Constructor helper. Merges included files in
        # 'raw["include"]' into 'raw' using 'self._include_paths' as a
        # source of include files, removing the "include" key while
        # doing so.
        #
        # This treats 'binding_path' as the binding file being built up
        # and uses it for error messages.

        if "include" not in raw:
            return raw

        include = raw.pop("include")

        # First, merge the included files together. If more than one included
        # file has a 'required:' for a particular property, OR the values
        # together, so that 'required: true' wins.

        merged: Dict[str, Any] = {}

        if isinstance(include, str):
            # Simple scalar string case
            _merge_props(merged, self._load_raw(include), None, binding_path,
                         False)
        elif isinstance(include, list):
            # List of strings and maps. These types may be intermixed.
            for elem in include:
                if isinstance(elem, str):
                    _merge_props(merged, self._load_raw(elem), None,
                                 binding_path, False)
                elif isinstance(elem, dict):
                    name = elem.pop('name', None)
                    allowlist = elem.pop('property-allowlist', None)
                    blocklist = elem.pop('property-blocklist', None)
                    child_filter = elem.pop('child-binding', None)

                    if elem:
                        # We've popped out all the valid keys.
                        _err(f"'include:' in {binding_path} should not have "
                             f"these unexpected contents: {elem}")

                    _check_include_dict(name, allowlist, blocklist,
                                        child_filter, binding_path)

                    contents = self._load_raw(name)

                    _filter_properties(contents, allowlist, blocklist,
                                       child_filter, binding_path)
                    _merge_props(merged, contents, None, binding_path, False)
                else:
                    _err(f"all elements in 'include:' in {binding_path} "
                         "should be either strings or maps with a 'name' key "
                         "and optional 'property-allowlist' or "
                         f"'property-blocklist' keys, but got: {elem}")
        else:
            # Invalid item.
            _err(f"'include:' in {binding_path} "
                 f"should be a string or list, but has type {type(include)}")

        # Next, merge the merged included files into 'raw'. Error out if
        # 'raw' has 'required: false' while the merged included files have
        # 'required: true'.

        _merge_props(raw, merged, None, binding_path, check_required=True)

        return raw

    def _load_raw(self, fname: str) -> dict:
        # Returns the contents of the binding given by 'fname' after merging
        # any bindings it lists in 'include:' into it. 'fname' is just the
        # basename of the file, so we check that there aren't multiple
        # candidates.

        path = self._fname2path.get(fname)

        if not path:
            _err(f"'{fname}' not found")

        with open(path, encoding="utf-8") as f:
            contents = yaml.load(f, Loader=_BindingLoader)
            if not isinstance(contents, dict):
                _err(f'{path}: invalid contents, expected a mapping')

        return self._merge_includes(contents, path)

    def _check(self, require_compatible: bool, require_description: bool):
        # Does sanity checking on the binding.

        raw = self.raw

        if "compatible" in raw:
            compatible = raw["compatible"]
            if not isinstance(compatible, str):
                _err(f"malformed 'compatible: {compatible}' "
                     f"field in {self.path} - "
                     f"should be a string, not {type(compatible).__name__}")
        elif require_compatible:
            _err(f"missing 'compatible' in {self.path}")

        if "description" in raw:
            description = raw["description"]
            if not isinstance(description, str) or not description:
                _err(f"malformed or empty 'description' in {self.path}")
        elif require_description:
            _err(f"missing 'description' in {self.path}")

        # Allowed top-level keys. The 'include' key should have been
        # removed by _load_raw() already.
        ok_top = {"description", "compatible", "bus", "on-bus",
                  "properties", "child-binding"}

        # Descriptive errors for legacy bindings.
        legacy_errors = {
            "#cells": "expected *-cells syntax",
            "child": "use 'bus: <bus>' instead",
            "child-bus": "use 'bus: <bus>' instead",
            "parent": "use 'on-bus: <bus>' instead",
            "parent-bus": "use 'on-bus: <bus>' instead",
            "sub-node": "use 'child-binding' instead",
            "title": "use 'description' instead",
        }

        for key in raw:
            if key in legacy_errors:
                _err(f"legacy '{key}:' in {self.path}, {legacy_errors[key]}")

            if key not in ok_top and not key.endswith("-cells"):
                # Note: this second string needs the f-prefix so that
                # the allowed keys are actually interpolated into the
                # error message.
                _err(f"unknown key '{key}' in {self.path}, "
                     f"expected one of {', '.join(ok_top)}, or *-cells")

        if "bus" in raw:
            bus = raw["bus"]
            # 'bus:' must be either a single string or a list whose
            # elements are all strings. The two list conditions must be
            # OR'ed ('not a list' OR 'contains a non-string'): with
            # 'and', a list containing non-strings would pass
            # validation.
            if not isinstance(bus, str) and \
               (not isinstance(bus, list) or
                not all(isinstance(elem, str) for elem in bus)):
                _err(f"malformed 'bus:' value in {self.path}, "
                     "expected string or list of strings")

            if isinstance(bus, list):
                self._buses = bus
            else:
                # Convert bus into a list
                self._buses = [bus]

        if "on-bus" in raw and \
           not isinstance(raw["on-bus"], str):
            _err(f"malformed 'on-bus:' value in {self.path}, "
                 "expected string")

        self._check_properties()

        for key, val in raw.items():
            if key.endswith("-cells"):
                if not isinstance(val, list) or \
                   not all(isinstance(elem, str) for elem in val):
                    _err(f"malformed '{key}:' in {self.path}, "
                         "expected a list of strings")

    def _check_properties(self) -> None:
        # _check() helper for checking the contents of 'properties:'.

        raw = self.raw

        if "properties" not in raw:
            return

        ok_prop_keys = {"description", "type", "required",
                        "enum", "const", "default", "deprecated",
                        "specifier-space"}

        for prop_name, options in raw["properties"].items():
            for key in options:
                if key not in ok_prop_keys:
                    _err(f"unknown setting '{key}' in "
                         f"'properties: {prop_name}: ...' in {self.path}, "
                         f"expected one of {', '.join(ok_prop_keys)}")

            _check_prop_by_type(prop_name, options, self.path)

            for true_false_opt in ["required", "deprecated"]:
                if true_false_opt in options:
                    option = options[true_false_opt]
                    if not isinstance(option, bool):
                        _err(f"malformed '{true_false_opt}:' setting '{option}' "
                             f"for '{prop_name}' in 'properties' in {self.path}, "
                             "expected true/false")

            if options.get("deprecated") and options.get("required"):
                _err(f"'{prop_name}' in 'properties' in {self.path} should not "
                      "have both 'deprecated' and 'required' set")

            if "description" in options and \
               not isinstance(options["description"], str):
                _err("missing, malformed, or empty 'description' for "
                     f"'{prop_name}' in 'properties' in {self.path}")

            if "enum" in options and not isinstance(options["enum"], list):
                _err(f"enum in {self.path} for property '{prop_name}' "
                     "is not a list")
472
class PropertySpec:
    """
    Represents a "property specification", i.e. the description of a
    property provided by a binding file, like its type and description.

    These attributes are available on PropertySpec objects:

    binding:
      The Binding object which defined this property.

    name:
      The property's name.

    path:
      The file where this property was defined. In case a binding includes
      other bindings, this is the file where the property was last modified.

    type:
      The type of the property as a string, as given in the binding.

    description:
      The free-form description of the property as a string, or None.

    enum:
      A list of values the property may take as given in the binding, or None.

    enum_tokenizable:
      True if enum is not None and all the values in it are tokenizable;
      False otherwise.

      A property must have string type and an "enum:" in its binding to be
      tokenizable. Additionally, the "enum:" values must be unique after
      converting all non-alphanumeric characters to underscores (so "foo bar"
      and "foo_bar" in the same "enum:" would not be tokenizable).

    enum_upper_tokenizable:
      Like 'enum_tokenizable', with the additional restriction that the
      "enum:" values must be unique after uppercasing and converting
      non-alphanumeric characters to underscores.

    const:
      The property's constant value as given in the binding, or None.

    default:
      The property's default value as given in the binding, or None.

    deprecated:
      True if the property is deprecated; False otherwise.

    required:
      True if the property is marked required; False otherwise.

    specifier_space:
      The specifier space for the property as given in the binding, or None.
    """

    def __init__(self, name: str, binding: Binding):
        self.binding: Binding = binding
        self.name: str = name
        # The raw 'properties: <name>: ...' dict from the binding.
        self._raw: Dict[str, Any] = binding.raw["properties"][name]

    def __repr__(self) -> str:
        return "<PropertySpec {} type '{}'>".format(self.name, self.type)

    @property
    def path(self) -> Optional[str]:
        "See the class docstring"
        return self.binding.path

    @property
    def type(self) -> str:
        "See the class docstring"
        return self._raw["type"]

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("description")

    @property
    def enum(self) -> Optional[list]:
        "See the class docstring"
        return self._raw.get("enum")

    @property
    def enum_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily on first access and cached.
        try:
            return self._enum_tokenizable
        except AttributeError:
            pass

        if self.type != 'string' or self.enum is None:
            self._enum_tokenizable = False
        else:
            # Cache the tokenized values in _as_tokens so that
            # enum_upper_tokenizable can reuse them.
            self._as_tokens = [re.sub(_NOT_ALPHANUM_OR_UNDERSCORE, '_', value)
                               for value in self.enum]
            # Tokenizable iff tokenization introduced no collisions.
            self._enum_tokenizable = (len(self._as_tokens)
                                      == len(set(self._as_tokens)))

        return self._enum_tokenizable

    @property
    def enum_upper_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily on first access and cached. Accessing
        # enum_tokenizable first also populates _as_tokens.
        try:
            return self._enum_upper_tokenizable
        except AttributeError:
            pass

        if not self.enum_tokenizable:
            self._enum_upper_tokenizable = False
        else:
            uppercased = {token.upper() for token in self._as_tokens}
            self._enum_upper_tokenizable = \
                len(uppercased) == len(self._as_tokens)

        return self._enum_upper_tokenizable

    @property
    def const(self) -> Union[None, int, List[int], str, List[str]]:
        "See the class docstring"
        return self._raw.get("const")

    @property
    def default(self) -> Union[None, int, List[int], str, List[str]]:
        "See the class docstring"
        return self._raw.get("default")

    @property
    def required(self) -> bool:
        "See the class docstring"
        return self._raw.get("required", False)

    @property
    def deprecated(self) -> bool:
        "See the class docstring"
        return self._raw.get("deprecated", False)

    @property
    def specifier_space(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("specifier-space")
610
# Type alias covering every shape a Property's 'val' attribute may take;
# which variant applies is determined by the 'type:' key in the property's
# binding (see the Property class docstring for the mapping).
PropertyValType = Union[int, str,
                        List[int], List[str],
                        'Node', List['Node'],
                        List[Optional['ControllerAndData']],
                        bytes, None]
616
617
@dataclass
class Property:
    """
    Represents a property on a Node, as set in its DT node and with
    additional info from the 'properties:' section of the binding.

    Only properties mentioned in 'properties:' get created. Properties of type
    'compound' currently do not get Property instances, as it's not clear
    what to generate for them.

    These attributes are available on Property objects. Several are
    just convenience accessors for attributes on the PropertySpec object
    accessible via the 'spec' attribute.

    These attributes are available on Property objects:

    spec:
      The PropertySpec object which specifies this property.

    val:
      The value of the property, with the format determined by spec.type,
      which comes from the 'type:' string in the binding.

        - For 'type: int/array/string/string-array', 'val' is what you'd expect
          (a Python integer or string, or a list of them)

        - For 'type: phandle' and 'type: path', 'val' is the pointed-to Node
          instance

        - For 'type: phandles', 'val' is a list of the pointed-to Node
          instances

        - For 'type: phandle-array', 'val' is a list of ControllerAndData
          instances. See the documentation for that class.

    node:
      The Node instance the property is on

    name:
      Convenience for spec.name.

    description:
      Convenience for spec.description with leading and trailing whitespace
      (including newlines) removed. May be None.

    type:
      Convenience for spec.type.

    val_as_token:
      The value of the property as a token, i.e. with non-alphanumeric
      characters replaced with underscores. This is only safe to access
      if 'spec.enum_tokenizable' returns True.

    enum_index:
      The index of 'val' in 'spec.enum' (which comes from the 'enum:' list
      in the binding), or None if spec.enum is None.
    """

    spec: PropertySpec
    val: PropertyValType
    node: 'Node'

    @property
    def name(self) -> str:
        "See the class docstring"
        return self.spec.name

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        desc = self.spec.description
        if not desc:
            return None
        return desc.strip()

    @property
    def type(self) -> str:
        "See the class docstring"
        return self.spec.type

    @property
    def val_as_token(self) -> str:
        "See the class docstring"
        value = self.val
        # Only string-typed, tokenizable properties may be accessed here.
        assert isinstance(value, str)
        return str_as_token(value)

    @property
    def enum_index(self) -> Optional[int]:
        "See the class docstring"
        enum = self.spec.enum
        if not enum:
            return None
        return enum.index(self.val)
706
707
@dataclass
class Register:
    """
    Represents a register on a node.

    These attributes are available on Register objects:

    node:
      The Node instance this register is from

    name:
      The name of the register as given in the 'reg-names' property, or None if
      there is no 'reg-names' property

    addr:
      The starting address of the register, in the parent address space, or None
      if #address-cells is zero. Any 'ranges' properties are taken into account.

    size:
      The length of the register in bytes, or None if no size is available
    """

    node: 'Node'          # node that carries the 'reg' entry
    name: Optional[str]   # from 'reg-names', if present
    addr: Optional[int]   # translated start address
    size: Optional[int]   # length in bytes
734
735
@dataclass
class Range:
    """
    Represents a translation range on a node as described by the 'ranges' property.

    These attributes are available on Range objects:

    node:
      The Node instance this range is from

    child_bus_cells:
      The number of cells used to describe a child bus address.

    child_bus_addr:
      A physical address within the child bus address space, or None if the
      child's #address-cells equals 0.

    parent_bus_cells:
      The number of cells used to describe a parent bus address.

    parent_bus_addr:
      A physical address within the parent bus address space, or None if the
      parent's #address-cells equals 0.

    length_cells:
      The number of cells used to describe the size of range in
      the child's address space.

    length:
      The size of the range in the child address space, or None if the
      child's #size-cells equals 0.
    """
    node: 'Node'                    # node with the 'ranges' property
    child_bus_cells: int            # cells in a child bus address
    child_bus_addr: Optional[int]   # None when #address-cells is 0
    parent_bus_cells: int           # cells in a parent bus address
    parent_bus_addr: Optional[int]  # None when #address-cells is 0
    length_cells: int               # cells in the range length
    length: Optional[int]           # None when #size-cells is 0
775
776
@dataclass
class ControllerAndData:
    """
    Represents an entry in an 'interrupts' or 'type: phandle-array' property
    value, e.g. <&ctrl-1 4 0> in

        cs-gpios = <&ctrl-1 4 0 &ctrl-2 3 4>;

    These attributes are available on ControllerAndData objects:

    node:
      The Node instance the property appears on

    controller:
      The Node instance for the controller (e.g. the controller the interrupt
      gets sent to for interrupts)

    data:
      A dictionary that maps names from the *-cells key in the binding for the
      controller to data values, e.g. {"pin": 4, "flags": 0} for the example
      above.

      'interrupts = <1 2>' might give {"irq": 1, "level": 2}.

    name:
      The name of the entry as given in
      'interrupt-names'/'gpio-names'/'pwm-names'/etc., or None if there is no
      *-names property

    basename:
      Basename for the controller when supporting named cells
    """
    node: 'Node'             # node whose property contains this entry
    controller: 'Node'       # target of the phandle
    data: dict               # cell name -> cell value
    name: Optional[str]      # from the matching *-names property
    basename: Optional[str]  # specifier-space basename, if named cells
814
815
@dataclass
class PinCtrl:
    """
    Represents a pin control configuration for a set of pins on a device,
    e.g. pinctrl-0 or pinctrl-1.

    These attributes are available on PinCtrl objects:

    node:
      The Node instance the pinctrl-* property is on

    name:
      The name of the configuration, as given in pinctrl-names, or None if
      there is no pinctrl-names property

    name_as_token:
      Like 'name', but with non-alphanumeric characters converted to underscores.

    conf_nodes:
      A list of Node instances for the pin configuration nodes, e.g.
      the nodes pointed at by &state_1 and &state_2 in

          pinctrl-0 = <&state_1 &state_2>;
    """

    node: 'Node'               # node with the pinctrl-<index> property
    name: Optional[str]        # from pinctrl-names, if present
    conf_nodes: List['Node']   # pointed-to configuration nodes

    @property
    def name_as_token(self):
        "See the class docstring"
        if self.name is None:
            return None
        return str_as_token(self.name)
849
850
851class Node:
852    """
853    Represents a devicetree node, augmented with information from bindings, and
854    with some interpretation of devicetree properties. There's a one-to-one
855    correspondence between devicetree nodes and Nodes.
856
857    These attributes are available on Node objects:
858
859    edt:
860      The EDT instance this node is from
861
862    name:
863      The name of the node
864
865    unit_addr:
866      An integer with the ...@<unit-address> portion of the node name,
867      translated through any 'ranges' properties on parent nodes, or None if
868      the node name has no unit-address portion
869
870    description:
871      The description string from the binding for the node, or None if the node
872      has no binding. Leading and trailing whitespace (including newlines) is
873      removed.
874
875    path:
876      The devicetree path of the node
877
878    label:
879      The text from the 'label' property on the node, or None if the node has
880      no 'label'
881
882    labels:
883      A list of all of the devicetree labels for the node, in the same order
884      as the labels appear, but with duplicates removed.
885
886      This corresponds to the actual devicetree source labels, unlike the
887      "label" attribute, which is the value of a devicetree property named
888      "label".
889
890    parent:
891      The Node instance for the devicetree parent of the Node, or None if the
892      node is the root node
893
894    children:
895      A dictionary with the Node instances for the devicetree children of the
896      node, indexed by name
897
898    dep_ordinal:
899      A non-negative integer value such that the value for a Node is
900      less than the value for all Nodes that depend on it.
901
902      The ordinal is defined for all Nodes, and is unique among nodes in its
903      EDT 'nodes' list.
904
905    required_by:
906      A list with the nodes that directly depend on the node
907
908    depends_on:
909      A list with the nodes that the node directly depends on
910
911    status:
912      The node's status property value, as a string, or "okay" if the node
913      has no status property set. If the node's status property is "ok",
914      it is converted to "okay" for consistency.
915
916    read_only:
917      True if the node has a 'read-only' property, and False otherwise
918
919    matching_compat:
920      The 'compatible' string for the binding that matched the node, or None if
921      the node has no binding
922
923    binding_path:
924      The path to the binding file for the node, or None if the node has no
925      binding
926
927    compats:
928      A list of 'compatible' strings for the node, in the same order that
929      they're listed in the .dts file
930
931    ranges:
932      A list of Range objects extracted from the node's ranges property.
      The list is empty if the node does not have a ranges property.
934
935    regs:
936      A list of Register objects for the node's registers
937
938    props:
939      A dict that maps property names to Property objects.
940      Property objects are created for all devicetree properties on the node
941      that are mentioned in 'properties:' in the binding.
942
943    aliases:
944      A list of aliases for the node. This is fetched from the /aliases node.
945
946    interrupts:
947      A list of ControllerAndData objects for the interrupts generated by the
948      node. The list is empty if the node does not generate interrupts.
949
950    pinctrls:
951      A list of PinCtrl objects for the pinctrl-<index> properties on the
952      node, sorted by index. The list is empty if the node does not have any
953      pinctrl-<index> properties.
954
955    buses:
956      If the node is a bus node (has a 'bus:' key in its binding), then this
957      attribute holds the list of supported bus types, e.g. ["i2c"], ["spi"]
958      or ["i3c", "i2c"] if multiple protocols are supported via the same bus.
959      If the node is not a bus node, then this attribute is an empty list.
960
961    on_buses:
962      The bus the node appears on, e.g. ["i2c"], ["spi"] or ["i3c", "i2c"] if
963      multiple protocols are supported via the same bus. The bus is determined
964      by searching upwards for a parent node whose binding has a 'bus:' key,
965      returning the value of the first 'bus:' key found. If none of the node's
966      parents has a 'bus:' key, this attribute is an empty list.
967
968    bus_node:
      Like on_buses, but contains the Node for the bus controller, or None if the
970      node is not on a bus.
971
972    flash_controller:
973      The flash controller for the node. Only meaningful for nodes representing
974      flash partitions.
975
976    spi_cs_gpio:
977      The device's SPI GPIO chip select as a ControllerAndData instance, if it
978      exists, and None otherwise. See
979      Documentation/devicetree/bindings/spi/spi-controller.yaml in the Linux kernel.
980
981    gpio_hogs:
982      A list of ControllerAndData objects for the GPIOs hogged by the node. The
983      list is empty if the node does not hog any GPIOs. Only relevant for GPIO hog
984      nodes.
985    """
986
    def __init__(self,
                 dt_node: dtlib_Node,
                 edt: 'EDT',
                 compats: List[str]):
        '''
        For internal use only; not meant to be used outside edtlib itself.

        Wraps the given dtlib node. Most attributes start out empty and are
        filled in later by the EDT instance via the various _init_*() methods
        (which rely on parents being initialized before children).
        '''
        # Public, some of which are initialized properly later:
        self.edt: 'EDT' = edt
        self.dep_ordinal: int = -1          # assigned during dependency sorting
        self.matching_compat: Optional[str] = None
        self.binding_path: Optional[str] = None
        self.compats: List[str] = compats
        self.ranges: List[Range] = []
        self.regs: List[Register] = []
        self.props: Dict[str, Property] = {}
        self.interrupts: List[ControllerAndData] = []
        self.pinctrls: List[PinCtrl] = []
        self.bus_node: Optional['Node'] = None

        # Private, don't touch outside the class:
        self._node: dtlib_Node = dt_node    # the underlying dtlib node
        self._binding: Optional[Binding] = None
1010
1011    @property
1012    def name(self) -> str:
1013        "See the class docstring"
1014        return self._node.name
1015
1016    @property
1017    def unit_addr(self) -> Optional[int]:
1018        "See the class docstring"
1019
1020        # TODO: Return a plain string here later, like dtlib.Node.unit_addr?
1021
1022        if "@" not in self.name:
1023            return None
1024
1025        try:
1026            addr = int(self.name.split("@", 1)[1], 16)
1027        except ValueError:
1028            _err(f"{self!r} has non-hex unit address")
1029
1030        return _translate(addr, self._node)
1031
1032    @property
1033    def description(self) -> Optional[str]:
1034        "See the class docstring."
1035        if self._binding:
1036            return self._binding.description
1037        return None
1038
1039    @property
1040    def path(self) ->  str:
1041        "See the class docstring"
1042        return self._node.path
1043
1044    @property
1045    def label(self) -> Optional[str]:
1046        "See the class docstring"
1047        if "label" in self._node.props:
1048            return self._node.props["label"].to_string()
1049        return None
1050
1051    @property
1052    def labels(self) -> List[str]:
1053        "See the class docstring"
1054        return self._node.labels
1055
1056    @property
1057    def parent(self) -> Optional['Node']:
1058        "See the class docstring"
1059        return self.edt._node2enode.get(self._node.parent) # type: ignore
1060
1061    @property
1062    def children(self) -> Dict[str, 'Node']:
1063        "See the class docstring"
1064        # Could be initialized statically too to preserve identity, but not
1065        # sure if needed. Parent nodes being initialized before their children
1066        # would need to be kept in mind.
1067        return {name: self.edt._node2enode[node]
1068                for name, node in self._node.nodes.items()}
1069
1070    def child_index(self, node) -> int:
1071        """Get the index of *node* in self.children.
1072        Raises KeyError if the argument is not a child of this node.
1073        """
1074        if not hasattr(self, '_child2index'):
1075            # Defer initialization of this lookup table until this
1076            # method is callable to handle parents needing to be
1077            # initialized before their chidlren. By the time we
1078            # return from __init__, 'self.children' is callable.
1079            self._child2index: Dict[str, int] = {}
1080            for index, child_path in enumerate(child.path for child in
1081                                               self.children.values()):
1082                self._child2index[child_path] = index
1083
1084        return self._child2index[node.path]
1085
1086    @property
1087    def required_by(self) -> List['Node']:
1088        "See the class docstring"
1089        return self.edt._graph.required_by(self)
1090
1091    @property
1092    def depends_on(self) -> List['Node']:
1093        "See the class docstring"
1094        return self.edt._graph.depends_on(self)
1095
1096    @property
1097    def status(self) -> str:
1098        "See the class docstring"
1099        status = self._node.props.get("status")
1100
1101        if status is None:
1102            as_string = "okay"
1103        else:
1104            as_string = status.to_string()
1105
1106        if as_string == "ok":
1107            as_string = "okay"
1108
1109        return as_string
1110
1111    @property
1112    def read_only(self) -> bool:
1113        "See the class docstring"
1114        return "read-only" in self._node.props
1115
1116    @property
1117    def aliases(self) -> List[str]:
1118        "See the class docstring"
1119        return [alias for alias, node in self._node.dt.alias2node.items()
1120                if node is self._node]
1121
1122    @property
1123    def buses(self) -> List[str]:
1124        "See the class docstring"
1125        if self._binding:
1126            return self._binding.buses
1127        return []
1128
1129    @property
1130    def on_buses(self) -> List[str]:
1131        "See the class docstring"
1132        bus_node = self.bus_node
1133        return bus_node.buses if bus_node else []
1134
1135    @property
1136    def flash_controller(self) -> 'Node':
1137        "See the class docstring"
1138
1139        # The node path might be something like
1140        # /flash-controller@4001E000/flash@0/partitions/partition@fc000. We go
1141        # up two levels to get the flash and check its compat. The flash
1142        # controller might be the flash itself (for cases like NOR flashes).
1143        # For the case of 'soc-nv-flash', we assume the controller is the
1144        # parent of the flash node.
1145
1146        if not self.parent or not self.parent.parent:
1147            _err(f"flash partition {self!r} lacks parent or grandparent node")
1148
1149        controller = self.parent.parent
1150        if controller.matching_compat == "soc-nv-flash":
1151            if controller.parent is None:
1152                _err(f"flash controller '{controller.path}' cannot be the root node")
1153            return controller.parent
1154        return controller
1155
1156    @property
1157    def spi_cs_gpio(self) -> Optional[ControllerAndData]:
1158        "See the class docstring"
1159
1160        if not ("spi" in self.on_buses
1161                and self.bus_node
1162                and "cs-gpios" in self.bus_node.props):
1163            return None
1164
1165        if not self.regs:
1166            _err(f"{self!r} needs a 'reg' property, to look up the "
1167                 "chip select index for SPI")
1168
1169        parent_cs_lst = self.bus_node.props["cs-gpios"].val
1170        if TYPE_CHECKING:
1171            assert isinstance(parent_cs_lst, list)
1172
1173        # cs-gpios is indexed by the unit address
1174        cs_index = self.regs[0].addr
1175        if TYPE_CHECKING:
1176            assert isinstance(cs_index, int)
1177
1178        if cs_index >= len(parent_cs_lst):
1179            _err(f"index from 'regs' in {self!r} ({cs_index}) "
1180                 "is >= number of cs-gpios in "
1181                 f"{self.bus_node!r} ({len(parent_cs_lst)})")
1182
1183        ret = parent_cs_lst[cs_index]
1184        if TYPE_CHECKING:
1185            assert isinstance(ret, ControllerAndData)
1186        return ret
1187
1188    @property
1189    def gpio_hogs(self) -> List[ControllerAndData]:
1190        "See the class docstring"
1191
1192        if "gpio-hog" not in self.props:
1193            return []
1194
1195        if not self.parent or not "gpio-controller" in self.parent.props:
1196            _err(f"GPIO hog {self!r} lacks parent GPIO controller node")
1197
1198        if not "#gpio-cells" in self.parent._node.props:
1199            _err(f"GPIO hog {self!r} parent node lacks #gpio-cells")
1200
1201        n_cells = self.parent._node.props["#gpio-cells"].to_num()
1202        res = []
1203
1204        for item in _slice(self._node, "gpios", 4*n_cells,
1205                           f"4*(<#gpio-cells> (= {n_cells})"):
1206            controller = self.parent
1207            res.append(ControllerAndData(
1208                node=self, controller=controller,
1209                data=self._named_cells(controller, item, "gpio"),
1210                name=None, basename="gpio"))
1211
1212        return res
1213
1214    def __repr__(self) -> str:
1215        if self.binding_path:
1216            binding = "binding " + self.binding_path
1217        else:
1218            binding = "no binding"
1219        return f"<Node {self.path} in '{self.edt.dts_path}', {binding}>"
1220
    def _init_binding(self) -> None:
        # Initializes Node.matching_compat, Node._binding, and
        # Node.binding_path.
        #
        # Node._binding holds the data from the node's binding file, in the
        # format returned by PyYAML (plain Python lists, dicts, etc.), or None
        # if the node has no binding.

        # This relies on the parent of the node having already been
        # initialized, which is guaranteed by going through the nodes in
        # node_iter() order.

        if self.path in self.edt._infer_binding_for_paths:
            # An inferred binding was requested for this node path:
            # synthesize a binding from the node's properties instead of
            # looking one up via 'compatible'
            self._binding_from_properties()
            return

        if self.compats:
            on_buses = self.on_buses

            for compat in self.compats:
                # When matching, respect the order of the 'compatible' entries,
                # and for each one first try to match against an explicitly
                # specified bus (if any) and then against any bus. This is so
                # that matching against bindings which do not specify a bus
                # works the same way in Zephyr as it does elsewhere.
                binding = None

                for bus in on_buses:
                    if (compat, bus) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, bus]
                        break

                if not binding:
                    if (compat, None) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, None]
                    else:
                        # No binding for this compat; try the next one
                        continue

                self.binding_path = binding.path
                self.matching_compat = compat
                self._binding = binding
                return
        else:
            # No 'compatible' property. See if the parent binding has
            # a compatible. This can come from one or more levels of
            # nesting with 'child-binding:'.

            binding_from_parent = self._binding_from_parent()
            if binding_from_parent:
                self._binding = binding_from_parent
                self.binding_path = self._binding.path
                self.matching_compat = self._binding.compatible

                return

        # No binding found
        self._binding = self.binding_path = self.matching_compat = None
1278
1279    def _binding_from_properties(self) -> None:
1280        # Sets up a Binding object synthesized from the properties in the node.
1281
1282        if self.compats:
1283            _err(f"compatible in node with inferred binding: {self.path}")
1284
1285        # Synthesize a 'raw' binding as if it had been parsed from YAML.
1286        raw: Dict[str, Any] = {
1287            'description': 'Inferred binding from properties, via edtlib.',
1288            'properties': {},
1289        }
1290        for name, prop in self._node.props.items():
1291            pp: Dict[str, str] = {}
1292            if prop.type == Type.EMPTY:
1293                pp["type"] = "boolean"
1294            elif prop.type == Type.BYTES:
1295                pp["type"] = "uint8-array"
1296            elif prop.type == Type.NUM:
1297                pp["type"] = "int"
1298            elif prop.type == Type.NUMS:
1299                pp["type"] = "array"
1300            elif prop.type == Type.STRING:
1301                pp["type"] = "string"
1302            elif prop.type == Type.STRINGS:
1303                pp["type"] = "string-array"
1304            elif prop.type == Type.PHANDLE:
1305                pp["type"] = "phandle"
1306            elif prop.type == Type.PHANDLES:
1307                pp["type"] = "phandles"
1308            elif prop.type == Type.PHANDLES_AND_NUMS:
1309                pp["type"] = "phandle-array"
1310            elif prop.type == Type.PATH:
1311                pp["type"] = "path"
1312            else:
1313                _err(f"cannot infer binding from property: {prop} "
1314                     f"with type {prop.type!r}")
1315            raw['properties'][name] = pp
1316
1317        # Set up Node state.
1318        self.binding_path = None
1319        self.matching_compat = None
1320        self.compats = []
1321        self._binding = Binding(None, {}, raw=raw, require_compatible=False)
1322
1323    def _binding_from_parent(self) -> Optional[Binding]:
1324        # Returns the binding from 'child-binding:' in the parent node's
1325        # binding.
1326
1327        if not self.parent:
1328            return None
1329
1330        pbinding = self.parent._binding
1331        if not pbinding:
1332            return None
1333
1334        if pbinding.child_binding:
1335            return pbinding.child_binding
1336
1337        return None
1338
1339    def _bus_node(self, support_fixed_partitions_on_any_bus: bool = True
1340                  ) -> Optional['Node']:
1341        # Returns the value for self.bus_node. Relies on parent nodes being
1342        # initialized before their children.
1343
1344        if not self.parent:
1345            # This is the root node
1346            return None
1347
1348        # Treat 'fixed-partitions' as if they are not on any bus.  The reason is
1349        # that flash nodes might be on a SPI or controller or SoC bus.  Having
1350        # bus be None means we'll always match the binding for fixed-partitions
1351        # also this means want processing the fixed-partitions node we wouldn't
1352        # try to do anything bus specific with it.
1353        if support_fixed_partitions_on_any_bus and "fixed-partitions" in self.compats:
1354            return None
1355
1356        if self.parent.buses:
1357            # The parent node is a bus node
1358            return self.parent
1359
1360        # Same bus node as parent (possibly None)
1361        return self.parent.bus_node
1362
1363    def _init_props(self, default_prop_types: bool = False,
1364                    err_on_deprecated: bool = False) -> None:
1365        # Creates self.props. See the class docstring. Also checks that all
1366        # properties on the node are declared in its binding.
1367
1368        self.props = {}
1369
1370        node = self._node
1371        if self._binding:
1372            prop2specs = self._binding.prop2specs
1373        else:
1374            prop2specs = None
1375
1376        # Initialize self.props
1377        if prop2specs:
1378            for prop_spec in prop2specs.values():
1379                self._init_prop(prop_spec, err_on_deprecated)
1380            self._check_undeclared_props()
1381        elif default_prop_types:
1382            for name in node.props:
1383                if name not in _DEFAULT_PROP_SPECS:
1384                    continue
1385                prop_spec = _DEFAULT_PROP_SPECS[name]
1386                val = self._prop_val(name, prop_spec.type, False, False, None,
1387                                     None, err_on_deprecated)
1388                self.props[name] = Property(prop_spec, val, self)
1389
1390    def _init_prop(self, prop_spec: PropertySpec,
1391                   err_on_deprecated: bool) -> None:
1392        # _init_props() helper for initializing a single property.
1393        # 'prop_spec' is a PropertySpec object from the node's binding.
1394
1395        name = prop_spec.name
1396        prop_type = prop_spec.type
1397        if not prop_type:
1398            _err(f"'{name}' in {self.binding_path} lacks 'type'")
1399
1400        val = self._prop_val(name, prop_type, prop_spec.deprecated,
1401                             prop_spec.required, prop_spec.default,
1402                             prop_spec.specifier_space, err_on_deprecated)
1403
1404        if val is None:
1405            # 'required: false' property that wasn't there, or a property type
1406            # for which we store no data.
1407            return
1408
1409        enum = prop_spec.enum
1410        if enum and val not in enum:
1411            _err(f"value of property '{name}' on {self.path} in "
1412                 f"{self.edt.dts_path} ({val!r}) is not in 'enum' list in "
1413                 f"{self.binding_path} ({enum!r})")
1414
1415        const = prop_spec.const
1416        if const is not None and val != const:
1417            _err(f"value of property '{name}' on {self.path} in "
1418                 f"{self.edt.dts_path} ({val!r}) "
1419                 "is different from the 'const' value specified in "
1420                 f"{self.binding_path} ({const!r})")
1421
1422        # Skip properties that start with '#', like '#size-cells', and mapping
1423        # properties like 'gpio-map'/'interrupt-map'
1424        if name[0] == "#" or name.endswith("-map"):
1425            return
1426
1427        self.props[name] = Property(prop_spec, val, self)
1428
    def _prop_val(self, name: str, prop_type: str,
                  deprecated: bool, required: bool,
                  default: PropertyValType,
                  specifier_space: Optional[str],
                  err_on_deprecated: bool) -> PropertyValType:
        # _init_prop() helper for getting the property's value
        #
        # name:
        #   Property name from binding
        #
        # prop_type:
        #   Property type from binding (a string like "int")
        #
        # deprecated:
        #   True if the property is deprecated
        #
        # required:
        #   True if the property is required to exist
        #
        # default:
        #   Default value to use when the property doesn't exist, or None if
        #   the binding doesn't give a default value
        #
        # specifier_space:
        #   Property specifier-space from binding (if prop_type is "phandle-array")
        #
        # err_on_deprecated:
        #   If True, a deprecated property is an error instead of warning.

        node = self._node
        prop = node.props.get(name)

        # Warn (or error out) when a deprecated property is actually set
        if prop and deprecated:
            msg = (f"'{name}' is marked as deprecated in 'properties:' "
                   f"in {self.binding_path} for node {node.path}.")
            if err_on_deprecated:
                _err(msg)
            else:
                _LOG.warning(msg)

        # Property is missing from the devicetree: error out if required,
        # otherwise fall back on the binding's default (if any)
        if not prop:
            if required and self.status == "okay":
                _err(f"'{name}' is marked as required in 'properties:' in "
                     f"{self.binding_path}, but does not appear in {node!r}")

            if default is not None:
                # YAML doesn't have a native format for byte arrays. We need to
                # convert those from an array like [0x12, 0x34, ...]. The
                # format has already been checked in
                # _check_prop_by_type().
                if prop_type == "uint8-array":
                    return bytes(default) # type: ignore
                return default

            return False if prop_type == "boolean" else None

        # Property exists: convert its value based on the binding's type
        if prop_type == "boolean":
            if prop.type != Type.EMPTY:
                _err("'{0}' in {1!r} is defined with 'type: boolean' in {2}, "
                     "but is assigned a value ('{3}') instead of being empty "
                     "('{0};')".format(name, node, self.binding_path, prop))
            return True

        if prop_type == "int":
            return prop.to_num()

        if prop_type == "array":
            return prop.to_nums()

        if prop_type == "uint8-array":
            return prop.to_bytes()

        if prop_type == "string":
            return prop.to_string()

        if prop_type == "string-array":
            return prop.to_strings()

        if prop_type == "phandle":
            return self.edt._node2enode[prop.to_node()]

        if prop_type == "phandles":
            return [self.edt._node2enode[node] for node in prop.to_nodes()]

        if prop_type == "phandle-array":
            # This type is a bit high-level for dtlib as it involves
            # information from bindings and *-names properties, so there's no
            # to_phandle_array() in dtlib. Do the type check ourselves.
            if prop.type not in (Type.PHANDLE, Type.PHANDLES, Type.PHANDLES_AND_NUMS):
                _err(f"expected property '{name}' in {node.path} in "
                     f"{node.dt.filename} to be assigned "
                     f"with '{name} = < &foo ... &bar 1 ... &baz 2 3 >' "
                     f"(a mix of phandles and numbers), not '{prop}'")

            return self._standard_phandle_val_list(prop, specifier_space)

        if prop_type == "path":
            return self.edt._node2enode[prop.to_path()]

        # prop_type == "compound". Checking that the 'type:'
        # value is valid is done in _check_prop_by_type().
        #
        # 'compound' is a dummy type for properties that don't fit any of the
        # patterns above, so that we can require all entries in 'properties:'
        # to have a 'type: ...'. No Property object is created for it.
        return None
1535
1536    def _check_undeclared_props(self) -> None:
1537        # Checks that all properties are declared in the binding
1538
1539        for prop_name in self._node.props:
1540            # Allow a few special properties to not be declared in the binding
1541            if prop_name.endswith("-controller") or \
1542               prop_name.startswith("#") or \
1543               prop_name.startswith("pinctrl-") or \
1544               prop_name in {
1545                   "compatible", "status", "ranges", "phandle",
1546                   "interrupt-parent", "interrupts-extended", "device_type"}:
1547                continue
1548
1549            if TYPE_CHECKING:
1550                assert self._binding
1551
1552            if prop_name not in self._binding.prop2specs:
1553                _err(f"'{prop_name}' appears in {self._node.path} in "
1554                     f"{self.edt.dts_path}, but is not declared in "
1555                     f"'properties:' in {self.binding_path}")
1556
    def _init_ranges(self) -> None:
        # Initializes self.ranges
        node = self._node

        self.ranges = []

        if "ranges" not in node.props:
            return

        # Cell counts: the child address cells come from this node's
        # '#address-cells', the parent address cells from the parent node
        raw_child_address_cells = node.props.get("#address-cells")
        parent_address_cells = _address_cells(node)
        if raw_child_address_cells is None:
            child_address_cells = 2 # Default value per DT spec.
        else:
            child_address_cells = raw_child_address_cells.to_num()
        raw_child_size_cells = node.props.get("#size-cells")
        if raw_child_size_cells is None:
            child_size_cells = 1 # Default value per DT spec.
        else:
            child_size_cells = raw_child_size_cells.to_num()

        # Number of cells for one translation 3-tuple in 'ranges'
        entry_cells = child_address_cells + parent_address_cells + child_size_cells

        if entry_cells == 0:
            # All cell counts are zero: 'ranges' must be the empty form,
            # which just marks a 1:1 translation
            if len(node.props["ranges"].value) == 0:
                return
            else:
                _err(f"'ranges' should be empty in {self._node.path} since "
                     f"<#address-cells> = {child_address_cells}, "
                     f"<#address-cells for parent> = {parent_address_cells} and "
                     f"<#size-cells> = {child_size_cells}")

        # Each entry is <entry_cells> 32-bit (4-byte) cells:
        # (child-bus-address, parent-bus-address, length)
        for raw_range in _slice(node, "ranges", 4*entry_cells,
                                f"4*(<#address-cells> (= {child_address_cells}) + "
                                "<#address-cells for parent> "
                                f"(= {parent_address_cells}) + "
                                f"<#size-cells> (= {child_size_cells}))"):

            child_bus_cells = child_address_cells
            if child_address_cells == 0:
                child_bus_addr = None
            else:
                child_bus_addr = to_num(raw_range[:4*child_address_cells])
            parent_bus_cells = parent_address_cells
            if parent_address_cells == 0:
                parent_bus_addr = None
            else:
                parent_bus_addr = to_num(
                    raw_range[(4*child_address_cells):
                              (4*child_address_cells + 4*parent_address_cells)])
            length_cells = child_size_cells
            if child_size_cells == 0:
                length = None
            else:
                length = to_num(
                    raw_range[(4*child_address_cells + 4*parent_address_cells):])

            self.ranges.append(Range(self, child_bus_cells, child_bus_addr,
                                     parent_bus_cells, parent_bus_addr,
                                     length_cells, length))
1618
1619    def _init_regs(self) -> None:
1620        # Initializes self.regs
1621
1622        node = self._node
1623
1624        self.regs = []
1625
1626        if "reg" not in node.props:
1627            return
1628
1629        address_cells = _address_cells(node)
1630        size_cells = _size_cells(node)
1631
1632        for raw_reg in _slice(node, "reg", 4*(address_cells + size_cells),
1633                              f"4*(<#address-cells> (= {address_cells}) + "
1634                              f"<#size-cells> (= {size_cells}))"):
1635            if address_cells == 0:
1636                addr = None
1637            else:
1638                addr = _translate(to_num(raw_reg[:4*address_cells]), node)
1639            if size_cells == 0:
1640                size = None
1641            else:
1642                size = to_num(raw_reg[4*address_cells:])
1643            if size_cells != 0 and size == 0:
1644                _err(f"zero-sized 'reg' in {self._node!r} seems meaningless "
1645                     "(maybe you want a size of one or #size-cells = 0 "
1646                     "instead)")
1647
1648            # We'll fix up the name when we're done.
1649            self.regs.append(Register(self, None, addr, size))
1650
1651        _add_names(node, "reg", self.regs)
1652
1653    def _init_pinctrls(self) -> None:
1654        # Initializes self.pinctrls from any pinctrl-<index> properties
1655
1656        node = self._node
1657
1658        # pinctrl-<index> properties
1659        pinctrl_props = [prop for name, prop in node.props.items()
1660                         if re.match("pinctrl-[0-9]+", name)]
1661        # Sort by index
1662        pinctrl_props.sort(key=lambda prop: prop.name)
1663
1664        # Check indices
1665        for i, prop in enumerate(pinctrl_props):
1666            if prop.name != "pinctrl-" + str(i):
1667                _err(f"missing 'pinctrl-{i}' property on {node!r} "
1668                     "- indices should be contiguous and start from zero")
1669
1670        self.pinctrls = []
1671        for prop in pinctrl_props:
1672            # We'll fix up the names below.
1673            self.pinctrls.append(PinCtrl(
1674                node=self,
1675                name=None,
1676                conf_nodes=[self.edt._node2enode[node]
1677                            for node in prop.to_nodes()]))
1678
1679        _add_names(node, "pinctrl", self.pinctrls)
1680
1681    def _init_interrupts(self) -> None:
1682        # Initializes self.interrupts
1683
1684        node = self._node
1685
1686        self.interrupts = []
1687
1688        for controller_node, data in _interrupts(node):
1689            # We'll fix up the names below.
1690            controller = self.edt._node2enode[controller_node]
1691            self.interrupts.append(ControllerAndData(
1692                node=self, controller=controller,
1693                data=self._named_cells(controller, data, "interrupt"),
1694                name=None, basename=None))
1695
1696        _add_names(node, "interrupt", self.interrupts)
1697
1698    def _standard_phandle_val_list(
1699            self,
1700            prop: dtlib_Property,
1701            specifier_space: Optional[str]
1702    ) -> List[Optional[ControllerAndData]]:
1703        # Parses a property like
1704        #
1705        #     <prop.name> = <phandle cell phandle cell ...>;
1706        #
1707        # where each phandle points to a controller node that has a
1708        #
1709        #     #<specifier_space>-cells = <size>;
1710        #
1711        # property that gives the number of cells in the value after the
1712        # controller's phandle in the property.
1713        #
1714        # E.g. with a property like
1715        #
1716        #     pwms = <&foo 1 2 &bar 3>;
1717        #
1718        # If 'specifier_space' is "pwm", then we should have this elsewhere
1719        # in the tree:
1720        #
1721        #     foo: ... {
1722        #             #pwm-cells = <2>;
1723        #     };
1724        #
1725        #     bar: ... {
1726        #             #pwm-cells = <1>;
1727        #     };
1728        #
1729        # These values can be given names using the <specifier_space>-names:
1730        # list in the binding for the phandle nodes.
1731        #
1732        # Also parses any
1733        #
1734        #     <specifier_space>-names = "...", "...", ...
1735        #
1736        # Returns a list of Optional[ControllerAndData] instances.
1737        #
1738        # An index is None if the underlying phandle-array element is
1739        # unspecified.
1740
1741        if not specifier_space:
1742            if prop.name.endswith("gpios"):
1743                # There's some slight special-casing for *-gpios properties in that
1744                # e.g. foo-gpios still maps to #gpio-cells rather than
1745                # #foo-gpio-cells
1746                specifier_space = "gpio"
1747            else:
1748                # Strip -s. We've already checked that property names end in -s
1749                # if there is no specifier space in _check_prop_by_type().
1750                specifier_space = prop.name[:-1]
1751
1752        res: List[Optional[ControllerAndData]] = []
1753
1754        for item in _phandle_val_list(prop, specifier_space):
1755            if item is None:
1756                res.append(None)
1757                continue
1758
1759            controller_node, data = item
1760            mapped_controller, mapped_data = \
1761                _map_phandle_array_entry(prop.node, controller_node, data,
1762                                         specifier_space)
1763
1764            controller = self.edt._node2enode[mapped_controller]
1765            # We'll fix up the names below.
1766            res.append(ControllerAndData(
1767                node=self, controller=controller,
1768                data=self._named_cells(controller, mapped_data,
1769                                       specifier_space),
1770                name=None, basename=specifier_space))
1771
1772        _add_names(self._node, specifier_space, res)
1773
1774        return res
1775
1776    def _named_cells(
1777            self,
1778            controller: 'Node',
1779            data: bytes,
1780            basename: str
1781    ) -> Dict[str, int]:
1782        # Returns a dictionary that maps <basename>-cells names given in the
1783        # binding for 'controller' to cell values. 'data' is the raw data, as a
1784        # byte array.
1785
1786        if not controller._binding:
1787            _err(f"{basename} controller {controller._node!r} "
1788                 f"for {self._node!r} lacks binding")
1789
1790        if basename in controller._binding.specifier2cells:
1791            cell_names: List[str] = controller._binding.specifier2cells[basename]
1792        else:
1793            # Treat no *-cells in the binding the same as an empty *-cells, so
1794            # that bindings don't have to have e.g. an empty 'clock-cells:' for
1795            # '#clock-cells = <0>'.
1796            cell_names = []
1797
1798        data_list = to_nums(data)
1799        if len(data_list) != len(cell_names):
1800            _err(f"unexpected '{basename}-cells:' length in binding for "
1801                 f"{controller._node!r} - {len(cell_names)} "
1802                 f"instead of {len(data_list)}")
1803
1804        return dict(zip(cell_names, data_list))
1805
1806
class EDT:
    """
    Represents a devicetree augmented with information from bindings.

    These attributes are available on EDT objects:

    nodes:
      A list of Node objects for the nodes that appear in the devicetree

    compat2nodes:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a list of Nodes with that compatible.

    compat2okay:
      Like compat2nodes, but just for nodes with status 'okay'.

    compat2vendor:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a vendor name parsed from vendor_prefixes.

    compat2model:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a model name parsed from that compatible.

    label2node:
      A dict that maps a node label to the node with that label.

    dep_ord2node:
      A dict that maps an ordinal to the node with that dependency ordinal.

    chosen_nodes:
      A dict that maps the properties defined on the devicetree's /chosen
      node to their values. 'chosen' is indexed by property name (a string),
      and values are converted to Node objects. Note that properties of the
      /chosen node which can't be converted to a Node are not included in
      the value.

    dts_path:
      The .dts path passed to __init__()

    dts_source:
      The final DTS source code of the loaded devicetree after merging nodes
      and processing /delete-node/ and /delete-property/, as a string

    bindings_dirs:
      The bindings directory paths passed to __init__()

    scc_order:
      A list of lists of Nodes. All elements of each list
      depend on each other, and the Nodes in any list do not depend
      on any Node in a subsequent list. Each list defines a Strongly
      Connected Component (SCC) of the graph.

      For an acyclic graph each list will be a singleton. Cycles
      will be represented by lists with multiple nodes. Cycles are
      not expected to be present in devicetree graphs.

    The standard library's pickle module can be used to marshal and
    unmarshal EDT objects.
    """

    def __init__(self,
                 dts: Optional[str],
                 bindings_dirs: List[str],
                 warn_reg_unit_address_mismatch: bool = True,
                 default_prop_types: bool = True,
                 support_fixed_partitions_on_any_bus: bool = True,
                 infer_binding_for_paths: Optional[Iterable[str]] = None,
                 vendor_prefixes: Optional[Dict[str, str]] = None,
                 werror: bool = False):
        """EDT constructor.

        dts:
          Path to devicetree .dts file. Passing None for this value
          is only for internal use; do not do that outside of edtlib.

        bindings_dirs:
          List of paths to directories containing bindings, in YAML format.
          These directories are recursively searched for .yaml files.

        warn_reg_unit_address_mismatch (default: True):
          If True, a warning is logged if a node has a 'reg' property where
          the address of the first entry does not match the unit address of the
          node

        default_prop_types (default: True):
          If True, default property types will be used when a node has no
          bindings.

        support_fixed_partitions_on_any_bus (default True):
          If True, set the Node.bus for 'fixed-partitions' compatible nodes
          to None.  This allows 'fixed-partitions' binding to match regardless
          of the bus the 'fixed-partition' is under.

        infer_binding_for_paths (default: None):
          An iterable of devicetree paths identifying nodes for which bindings
          should be inferred from the node content.  (Child nodes are not
          processed.)  Pass none if no nodes should support inferred bindings.

        vendor_prefixes (default: None):
          A dict mapping vendor prefixes in compatible properties to their
          descriptions. If given, compatibles in the form "manufacturer,device"
          for which "manufacturer" is neither a key in the dict nor a specially
          exempt set of grandfathered-in cases will cause warnings.

        werror (default: False):
          If True, some edtlib specific warnings become errors. This currently
          errors out if 'dts' has any deprecated properties set, or an unknown
          vendor prefix is used.
        """
        # All instance attributes should be initialized here.
        # This makes it easy to keep track of them, which makes
        # implementing __deepcopy__() easier.
        # If you change this, make sure to update __deepcopy__() too,
        # and update the tests for that method.

        # Public attributes (the rest are properties)
        self.nodes: List[Node] = []
        self.compat2nodes: Dict[str, List[Node]] = defaultdict(list)
        self.compat2okay: Dict[str, List[Node]] = defaultdict(list)
        self.compat2vendor: Dict[str, str] = defaultdict(str)
        self.compat2model: Dict[str, str] = defaultdict(str)
        self.label2node: Dict[str, Node] = {}
        self.dep_ord2node: Dict[int, Node] = {}
        self.dts_path: str = dts # type: ignore
        self.bindings_dirs: List[str] = list(bindings_dirs)

        # Saved kwarg values for internal use
        self._warn_reg_unit_address_mismatch: bool = warn_reg_unit_address_mismatch
        self._default_prop_types: bool = default_prop_types
        self._fixed_partitions_no_bus: bool = support_fixed_partitions_on_any_bus
        self._infer_binding_for_paths: Set[str] = set(infer_binding_for_paths or [])
        self._vendor_prefixes: Dict[str, str] = vendor_prefixes or {}
        self._werror: bool = bool(werror)

        # Other internal state
        self._compat2binding: Dict[Tuple[str, Optional[str]], Binding] = {}
        self._graph: Graph = Graph()
        self._binding_paths: List[str] = _binding_paths(self.bindings_dirs)
        self._binding_fname2path: Dict[str, str] = {
            os.path.basename(path): path
            for path in self._binding_paths
        }
        self._node2enode: Dict[dtlib_Node, Node] = {}

        if dts is not None:
            try:
                self._dt = DT(dts)
            except DTError as e:
                raise EDTError(e) from e
            self._finish_init()

    def _finish_init(self) -> None:
        # This helper exists to make the __deepcopy__() implementation
        # easier to keep in sync with __init__().
        _check_dt(self._dt)

        self._init_compat2binding()
        self._init_nodes()
        self._init_graph()
        self._init_luts()

        self._check()

    def get_node(self, path: str) -> Node:
        """
        Returns the Node at the DT path or alias 'path'. Raises EDTError if the
        path or alias doesn't exist.
        """
        try:
            return self._node2enode[self._dt.get_node(path)]
        except DTError as e:
            _err(e)

    @property
    def chosen_nodes(self) -> Dict[str, Node]:
        ret: Dict[str, Node] = {}

        try:
            chosen = self._dt.get_node("/chosen")
        except DTError:
            return ret

        for name, prop in chosen.props.items():
            try:
                node = prop.to_path()
            except DTError:
                # DTS value is not phandle or string, or path doesn't exist
                continue

            ret[name] = self._node2enode[node]

        return ret

    def chosen_node(self, name: str) -> Optional[Node]:
        """
        Returns the Node pointed at by the property named 'name' in /chosen, or
        None if the property is missing
        """
        return self.chosen_nodes.get(name)

    @property
    def dts_source(self) -> str:
        return f"{self._dt}"

    def __repr__(self) -> str:
        return f"<EDT for '{self.dts_path}', binding directories " \
            f"'{self.bindings_dirs}'>"

    def __deepcopy__(self, memo) -> 'EDT':
        """
        Implements support for the standard library copy.deepcopy()
        function on EDT instances.
        """

        ret = EDT(
            None,
            self.bindings_dirs,
            warn_reg_unit_address_mismatch=self._warn_reg_unit_address_mismatch,
            default_prop_types=self._default_prop_types,
            support_fixed_partitions_on_any_bus=self._fixed_partitions_no_bus,
            infer_binding_for_paths=set(self._infer_binding_for_paths),
            vendor_prefixes=dict(self._vendor_prefixes),
            werror=self._werror
        )
        ret.dts_path = self.dts_path
        ret._dt = deepcopy(self._dt, memo)
        ret._finish_init()
        return ret

    @property
    def scc_order(self) -> List[List[Node]]:
        try:
            return self._graph.scc_order()
        except Exception as e:
            # Chain the original exception so that tracebacks show the
            # root cause of the failure.
            raise EDTError(e) from e

    def _init_graph(self) -> None:
        # Constructs a graph of dependencies between Node instances,
        # which is usable for computing a partial order over the dependencies.
        # The algorithm supports detecting dependency loops.
        #
        # Actually computing the SCC order is lazily deferred to the
        # first time the scc_order property is read.

        for node in self.nodes:
            # A Node always depends on its parent.
            for child in node.children.values():
                self._graph.add_edge(child, node)

            # A Node depends on any Nodes present in 'phandle',
            # 'phandles', or 'phandle-array' property values.
            for prop in node.props.values():
                if prop.type == 'phandle':
                    self._graph.add_edge(node, prop.val)
                elif prop.type == 'phandles':
                    if TYPE_CHECKING:
                        assert isinstance(prop.val, list)
                    for phandle_node in prop.val:
                        self._graph.add_edge(node, phandle_node)
                elif prop.type == 'phandle-array':
                    if TYPE_CHECKING:
                        assert isinstance(prop.val, list)
                    for cd in prop.val:
                        if cd is None:
                            continue
                        if TYPE_CHECKING:
                            assert isinstance(cd, ControllerAndData)
                        self._graph.add_edge(node, cd.controller)

            # A Node depends on whatever supports the interrupts it
            # generates.
            for intr in node.interrupts:
                self._graph.add_edge(node, intr.controller)

    def _init_compat2binding(self) -> None:
        # Creates self._compat2binding, a dictionary that maps
        # (<compatible>, <bus>) tuples (both strings) to Binding objects.
        #
        # The Binding objects are created from YAML files discovered
        # in self.bindings_dirs as needed.
        #
        # For example, self._compat2binding["company,dev", "can"]
        # contains the Binding for the 'company,dev' device, when it
        # appears on the CAN bus.
        #
        # For bindings that don't specify a bus, <bus> is None, so that e.g.
        # self._compat2binding["company,notonbus", None] is the Binding.
        #
        # Only bindings for 'compatible' strings that appear in the devicetree
        # are loaded.

        dt_compats = _dt_compats(self._dt)
        # Searches for any 'compatible' string mentioned in the devicetree
        # files, with a regex
        dt_compats_search = re.compile(
            "|".join(re.escape(compat) for compat in dt_compats)
        ).search

        for binding_path in self._binding_paths:
            with open(binding_path, encoding="utf-8") as f:
                contents = f.read()

            # As an optimization, skip parsing files that don't contain any of
            # the .dts 'compatible' strings, which should be reasonably safe
            if not dt_compats_search(contents):
                continue

            # Load the binding and check that it actually matches one of the
            # compatibles. Might get false positives above due to comments and
            # stuff.

            try:
                # Parsed PyYAML output (Python lists/dictionaries/strings/etc.,
                # representing the file)
                raw = yaml.load(contents, Loader=_BindingLoader)
            except yaml.YAMLError as e:
                # _err() raises, so execution never continues past it; a
                # 'continue' here would be unreachable.
                _err(f"'{binding_path}' appears in binding directories "
                     f"but isn't valid YAML: {e}")

            # Convert the raw data to a Binding object, erroring out
            # if necessary.
            binding = self._binding(raw, binding_path, dt_compats)

            # Register the binding in self._compat2binding, along with
            # any child bindings that have their own compatibles.
            while binding is not None:
                if binding.compatible:
                    self._register_binding(binding)
                binding = binding.child_binding

    def _binding(self,
                 raw: Optional[dict],
                 binding_path: str,
                 dt_compats: Set[str]) -> Optional[Binding]:
        # Convert a 'raw' binding from YAML to a Binding object and return it.
        #
        # Error out if the raw data looks like an invalid binding.
        #
        # Return None if the file doesn't contain a binding or the
        # binding's compatible isn't in dt_compats.

        # Get the 'compatible:' string.
        if raw is None or "compatible" not in raw:
            # Empty file, binding fragment, spurious file, etc.
            return None

        compatible = raw["compatible"]

        if compatible not in dt_compats:
            # Not a compatible we care about.
            return None

        # Initialize and return the Binding object.
        return Binding(binding_path, self._binding_fname2path, raw=raw)

    def _register_binding(self, binding: Binding) -> None:
        # Do not allow two different bindings to have the same
        # 'compatible:'/'on-bus:' combo
        if TYPE_CHECKING:
            assert binding.compatible
        old_binding = self._compat2binding.get((binding.compatible,
                                                binding.on_bus))
        if old_binding:
            msg = (f"both {old_binding.path} and {binding.path} have "
                   f"'compatible: {binding.compatible}'")
            if binding.on_bus is not None:
                msg += f" and 'on-bus: {binding.on_bus}'"
            _err(msg)

        # Register the binding.
        self._compat2binding[binding.compatible, binding.on_bus] = binding

    def _init_nodes(self) -> None:
        # Creates a list of edtlib.Node objects from the dtlib.Node objects, in
        # self.nodes

        for dt_node in self._dt.node_iter():
            # Warning: We depend on parent Nodes being created before their
            # children. This is guaranteed by node_iter().
            if "compatible" in dt_node.props:
                compats = dt_node.props["compatible"].to_strings()
            else:
                compats = []
            node = Node(dt_node, self, compats)
            node.bus_node = node._bus_node(self._fixed_partitions_no_bus)
            node._init_binding()
            node._init_regs()
            node._init_ranges()

            self.nodes.append(node)
            self._node2enode[dt_node] = node

        for node in self.nodes:
            # These depend on all Node objects having been created, because
            # they (either always or sometimes) reference other nodes, so we
            # run them separately
            node._init_props(default_prop_types=self._default_prop_types,
                             err_on_deprecated=self._werror)
            node._init_interrupts()
            node._init_pinctrls()

        if self._warn_reg_unit_address_mismatch:
            # This warning matches the simple_bus_reg warning in dtc
            for node in self.nodes:
                if node.regs and node.regs[0].addr != node.unit_addr:
                    _LOG.warning("unit address and first address in 'reg' "
                                 f"(0x{node.regs[0].addr:x}) don't match for "
                                 f"{node.path}")

    def _init_luts(self) -> None:
        # Initialize node lookup tables (LUTs).

        for node in self.nodes:
            for label in node.labels:
                self.label2node[label] = node

            for compat in node.compats:
                self.compat2nodes[compat].append(node)

                if node.status == "okay":
                    self.compat2okay[compat].append(node)

                if compat in self.compat2vendor:
                    continue

                # The regular expression comes from dt-schema.
                compat_re = r'^[a-zA-Z][a-zA-Z0-9,+\-._]+$'
                if not re.match(compat_re, compat):
                    _err(f"node '{node.path}' compatible '{compat}' "
                         'must match this regular expression: '
                         f"'{compat_re}'")

                if ',' in compat and self._vendor_prefixes:
                    vendor, model = compat.split(',', 1)
                    if vendor in self._vendor_prefixes:
                        self.compat2vendor[compat] = self._vendor_prefixes[vendor]
                        self.compat2model[compat] = model

                    # As an exception, the root node can have whatever
                    # compatibles it wants. Other nodes get checked.
                    elif node.path != '/' and \
                       vendor not in _VENDOR_PREFIX_ALLOWED:
                        if self._werror:
                            handler_fn: Any = _err
                        else:
                            handler_fn = _LOG.warning
                        handler_fn(
                            f"node '{node.path}' compatible '{compat}' "
                            f"has unknown vendor prefix '{vendor}'")

        for nodeset in self.scc_order:
            node = nodeset[0]
            self.dep_ord2node[node.dep_ordinal] = node

    def _check(self) -> None:
        # Tree-wide checks and warnings.

        for binding in self._compat2binding.values():
            for spec in binding.prop2specs.values():
                if not spec.enum or spec.type != 'string':
                    continue

                if not spec.enum_tokenizable:
                    _LOG.warning(
                        f"compatible '{binding.compatible}' "
                        f"in binding '{binding.path}' has non-tokenizable enum "
                        f"for property '{spec.name}': " +
                        ', '.join(repr(x) for x in spec.enum))
                elif not spec.enum_upper_tokenizable:
                    _LOG.warning(
                        f"compatible '{binding.compatible}' "
                        f"in binding '{binding.path}' has enum for property "
                        f"'{spec.name}' that is only tokenizable "
                        'in lowercase: ' +
                        ', '.join(repr(x) for x in spec.enum))

        # Validate the contents of compatible properties.
        for node in self.nodes:
            if 'compatible' not in node.props:
                continue

            compatibles = node.props['compatible'].val

            # _check() runs after _init_compat2binding() has called
            # _dt_compats(), which already converted every compatible
            # property to a list of strings. So we know 'compatibles'
            # is a list, but add an assert for future-proofing.
            assert isinstance(compatibles, list)

            for compat in compatibles:
                # This is also just for future-proofing.
                assert isinstance(compat, str)
2303
2304
def bindings_from_paths(yaml_paths: List[str],
                        ignore_errors: bool = False) -> List[Binding]:
    """
    Get a list of Binding objects from the yaml files 'yaml_paths'.

    If 'ignore_errors' is True, YAML files that cause an EDTError when
    loaded are ignored. (No other exception types are silenced.)
    """

    fname2path = {os.path.basename(path): path for path in yaml_paths}

    bindings = []
    for path in yaml_paths:
        try:
            bindings.append(Binding(path, fname2path))
        except EDTError:
            if not ignore_errors:
                raise
            # Best-effort mode: skip the file that failed to load.

    return bindings
2325
2326
class EDTError(Exception):
    """Exception raised for devicetree- and binding-related errors"""
2329
2330#
2331# Public global functions
2332#
2333
2334
def load_vendor_prefixes_txt(vendor_prefixes: str) -> Dict[str, str]:
    """Load a vendor-prefixes.txt file and return a dict
    representation mapping a vendor prefix to the vendor name.
    """
    vnd2vendor: Dict[str, str] = {}
    with open(vendor_prefixes, 'r', encoding='utf-8') as f:
        for raw_line in f:
            line = raw_line.strip()

            # Skip comments and empty lines.
            if not line or line.startswith('#'):
                continue

            # Everything else is expected to look like:
            #
            # <vnd><TAB><vendor>
            fields = line.split('\t', 1)
            assert len(fields) == 2, line
            vnd2vendor[fields[0]] = fields[1]
    return vnd2vendor
2355
2356#
2357# Private global functions
2358#
2359
2360
def _dt_compats(dt: DT) -> Set[str]:
    # Returns the set of all 'compatible' strings that appear anywhere in
    # the devicetree represented by dt (a dtlib.DT instance)

    compats: Set[str] = set()
    for node in dt.node_iter():
        if "compatible" in node.props:
            compats.update(node.props["compatible"].to_strings())
    return compats
2369
2370
2371def _binding_paths(bindings_dirs: List[str]) -> List[str]:
2372    # Returns a list with the paths to all bindings (.yaml files) in
2373    # 'bindings_dirs'
2374
2375    binding_paths = []
2376
2377    for bindings_dir in bindings_dirs:
2378        for root, _, filenames in os.walk(bindings_dir):
2379            for filename in filenames:
2380                if filename.endswith(".yaml") or filename.endswith(".yml"):
2381                    binding_paths.append(os.path.join(root, filename))
2382
2383    return binding_paths
2384
2385
def _binding_inc_error(msg):
    # Helper for reporting errors in the !include implementation.
    #
    # Raises yaml.constructor.ConstructorError with "error: " prefixed to
    # 'msg'; never returns normally.

    raise yaml.constructor.ConstructorError(None, None, "error: " + msg)
2390
2391
2392def _check_include_dict(name: Optional[str],
2393                        allowlist: Optional[List[str]],
2394                        blocklist: Optional[List[str]],
2395                        child_filter: Optional[dict],
2396                        binding_path: Optional[str]) -> None:
2397    # Check that an 'include:' named 'name' with property-allowlist
2398    # 'allowlist', property-blocklist 'blocklist', and
2399    # child-binding filter 'child_filter' has valid structure.
2400
2401    if name is None:
2402        _err(f"'include:' element in {binding_path} "
2403             "should have a 'name' key")
2404
2405    if allowlist is not None and blocklist is not None:
2406        _err(f"'include:' of file '{name}' in {binding_path} "
2407             "should not specify both 'property-allowlist:' "
2408             "and 'property-blocklist:'")
2409
2410    while child_filter is not None:
2411        child_copy = deepcopy(child_filter)
2412        child_allowlist: Optional[List[str]] = \
2413            child_copy.pop('property-allowlist', None)
2414        child_blocklist: Optional[List[str]] = \
2415            child_copy.pop('property-blocklist', None)
2416        next_child_filter: Optional[dict] = \
2417            child_copy.pop('child-binding', None)
2418
2419        if child_copy:
2420            # We've popped out all the valid keys.
2421            _err(f"'include:' of file '{name}' in {binding_path} "
2422                 "should not have these unexpected contents in a "
2423                 f"'child-binding': {child_copy}")
2424
2425        if child_allowlist is not None and child_blocklist is not None:
2426            _err(f"'include:' of file '{name}' in {binding_path} "
2427                 "should not specify both 'property-allowlist:' and "
2428                 "'property-blocklist:' in a 'child-binding:'")
2429
2430        child_filter = next_child_filter
2431
2432
def _filter_properties(raw: dict,
                       allowlist: Optional[List[str]],
                       blocklist: Optional[List[str]],
                       child_filter: Optional[dict],
                       binding_path: Optional[str]) -> None:
    # Filters 'raw["properties"]' in place through 'allowlist'/'blocklist',
    # then walks 'child_filter' and 'raw["child-binding"]' in lockstep,
    # filtering each matching child-binding level the same way.

    _filter_properties_helper(raw.get('properties'), allowlist, blocklist,
                              binding_path)

    level_filter = child_filter
    level_binding = raw.get('child-binding')
    while level_filter is not None and level_binding is not None:
        _filter_properties_helper(level_binding.get('properties'),
                                  level_filter.get('property-allowlist'),
                                  level_filter.get('property-blocklist'),
                                  binding_path)
        level_filter = level_filter.get('child-binding')
        level_binding = level_binding.get('child-binding')
2453
2454
def _filter_properties_helper(props: Optional[dict],
                              allowlist: Optional[List[str]],
                              blocklist: Optional[List[str]],
                              binding_path: Optional[str]) -> None:
    # Deletes entries from 'props' in place: everything not in
    # 'allowlist' if one is given, otherwise everything in 'blocklist'.
    # No-op when 'props' is missing or no filter was specified.

    if props is None or (allowlist is None and blocklist is None):
        return

    _check_prop_filter('property-allowlist', allowlist, binding_path)
    _check_prop_filter('property-blocklist', blocklist, binding_path)

    if allowlist is not None:
        keep = set(allowlist)
        doomed = [name for name in props if name not in keep]
    else:
        if TYPE_CHECKING:
            assert blocklist
        drop = set(blocklist)
        doomed = [name for name in props if name in drop]

    # Collected first, then deleted: can't delete while iterating 'props'
    for name in doomed:
        del props[name]
2476
2477
2478def _check_prop_filter(name: str, value: Optional[List[str]],
2479                       binding_path: Optional[str]) -> None:
2480    # Ensure an include: ... property-allowlist or property-blocklist
2481    # is a list.
2482
2483    if value is None:
2484        return
2485
2486    if not isinstance(value, list):
2487        _err(f"'{name}' value {value} in {binding_path} should be a list")
2488
2489
def _merge_props(to_dict: dict,
                 from_dict: dict,
                 parent: Optional[str],
                 binding_path: Optional[str],
                 check_required: bool = False):
    # Recursively merges 'from_dict' into 'to_dict', implementing
    # 'include:'. Values already present in 'to_dict' win, with two
    # twists:
    #
    # - 'required:' keys are ORed together, so 'required: true' from an
    #   included file is never lost. With 'check_required' True, a
    #   binding that tries to downgrade an included 'required: true' to
    #   false is an error instead.
    #
    # - Overwrites that _bad_overwrite() flags as sketchy are errors.
    #
    # 'parent' is the name of the key containing the two dicts, and
    # 'binding_path' the top-level binding; both only feed error messages.

    for key, inc_value in from_dict.items():
        if isinstance(to_dict.get(key), dict) and isinstance(inc_value, dict):
            # Both sides are dicts: merge one level down
            _merge_props(to_dict[key], inc_value, key, binding_path,
                         check_required)
        elif key not in to_dict:
            to_dict[key] = inc_value
        elif _bad_overwrite(to_dict, from_dict, key, check_required):
            _err(f"{binding_path} (in '{parent}'): '{key}' "
                 f"from included file overwritten ('{inc_value}' "
                 f"replaced with '{to_dict[key]}')")
        elif key == "required":
            # Need a separate check here, because this code runs before
            # Binding._check()
            if not (isinstance(from_dict["required"], bool) and
                    isinstance(to_dict["required"], bool)):
                _err(f"malformed 'required:' setting for '{parent}' in "
                     f"'properties' in {binding_path}, expected true/false")

            # 'required: true' takes precedence
            to_dict["required"] = to_dict["required"] or from_dict["required"]
2534
2535
2536def _bad_overwrite(to_dict: dict, from_dict: dict, prop: str,
2537                   check_required: bool) -> bool:
2538    # _merge_props() helper. Returns True in cases where it's bad that
2539    # to_dict[prop] takes precedence over from_dict[prop].
2540
2541    if to_dict[prop] == from_dict[prop]:
2542        return False
2543
2544    # These are overridden deliberately
2545    if prop in {"title", "description", "compatible"}:
2546        return False
2547
2548    if prop == "required":
2549        if not check_required:
2550            return False
2551        return from_dict[prop] and not to_dict[prop]
2552
2553    return True
2554
2555
def _binding_include(loader, node):
    # Backwards-compatible '!include' tag handler: '!include foo.yaml'
    # becomes ['foo.yaml'], and '!include [foo, bar]' just becomes
    # [foo, bar].

    if isinstance(node, yaml.SequenceNode):
        # !include [foo.yaml, bar.yaml]
        return loader.construct_sequence(node)

    if isinstance(node, yaml.ScalarNode):
        # !include foo.yaml
        return [loader.construct_scalar(node)]

    _binding_inc_error("unrecognised node type in !include statement")
2569
2570
2571def _check_prop_by_type(prop_name: str,
2572                        options: dict,
2573                        binding_path: Optional[str]) -> None:
2574    # Binding._check_properties() helper. Checks 'type:', 'default:',
2575    # 'const:' and # 'specifier-space:' for the property named 'prop_name'
2576
2577    prop_type = options.get("type")
2578    default = options.get("default")
2579    const = options.get("const")
2580
2581    if prop_type is None:
2582        _err(f"missing 'type:' for '{prop_name}' in 'properties' in "
2583             f"{binding_path}")
2584
2585    ok_types = {"boolean", "int", "array", "uint8-array", "string",
2586                "string-array", "phandle", "phandles", "phandle-array",
2587                "path", "compound"}
2588
2589    if prop_type not in ok_types:
2590        _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2591             f"has unknown type '{prop_type}', expected one of " +
2592             ", ".join(ok_types))
2593
2594    if "specifier-space" in options and prop_type != "phandle-array":
2595        _err(f"'specifier-space' in 'properties: {prop_name}' "
2596             f"has type '{prop_type}', expected 'phandle-array'")
2597
2598    if prop_type == "phandle-array":
2599        if not prop_name.endswith("s") and not "specifier-space" in options:
2600            _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2601                 f"has type 'phandle-array' and its name does not end in 's', "
2602                 f"but no 'specifier-space' was provided.")
2603
2604    # If you change const_types, be sure to update the type annotation
2605    # for PropertySpec.const.
2606    const_types = {"int", "array", "uint8-array", "string", "string-array"}
2607    if const and prop_type not in const_types:
2608        _err(f"const in {binding_path} for property '{prop_name}' "
2609             f"has type '{prop_type}', expected one of " +
2610             ", ".join(const_types))
2611
2612    # Check default
2613
2614    if default is None:
2615        return
2616
2617    if prop_type in {"boolean", "compound", "phandle", "phandles",
2618                     "phandle-array", "path"}:
2619        _err("'default:' can't be combined with "
2620             f"'type: {prop_type}' for '{prop_name}' in "
2621             f"'properties:' in {binding_path}")
2622
2623    def ok_default() -> bool:
2624        # Returns True if 'default' is an okay default for the property's type.
2625        # If you change this, be sure to update the type annotation for
2626        # PropertySpec.default.
2627
2628        if prop_type == "int" and isinstance(default, int) or \
2629           prop_type == "string" and isinstance(default, str):
2630            return True
2631
2632        # array, uint8-array, or string-array
2633
2634        if not isinstance(default, list):
2635            return False
2636
2637        if prop_type == "array" and \
2638           all(isinstance(val, int) for val in default):
2639            return True
2640
2641        if prop_type == "uint8-array" and \
2642           all(isinstance(val, int) and 0 <= val <= 255 for val in default):
2643            return True
2644
2645        # string-array
2646        return all(isinstance(val, str) for val in default)
2647
2648    if not ok_default():
2649        _err(f"'default: {default}' is invalid for '{prop_name}' "
2650             f"in 'properties:' in {binding_path}, "
2651             f"which has type {prop_type}")
2652
2653
def _translate(addr: int, node: dtlib_Node) -> int:
    # Recursively translates 'addr' on 'node' to the address space(s) of its
    # parent(s), by looking at 'ranges' properties. Returns the translated
    # address.
    #
    # addr:
    #   Address (an int) within 'node's address space
    #
    # node:
    #   The dtlib.Node the address belongs to

    if not node.parent or "ranges" not in node.parent.props:
        # No translation
        return addr

    if not node.parent.props["ranges"].value:
        # DT spec.: "If the property is defined with an <empty> value, it
        # specifies that the parent and child address space is identical, and
        # no address translation is required."
        #
        # Treat this the same as a 'range' that explicitly does a one-to-one
        # mapping, as opposed to there not being any translation.
        return _translate(addr, node.parent)

    # Gives the size of each component in a translation 3-tuple in 'ranges'
    child_address_cells = _address_cells(node)
    parent_address_cells = _address_cells(node.parent)
    child_size_cells = _size_cells(node)

    # Number of cells for one translation 3-tuple in 'ranges'
    entry_cells = child_address_cells + parent_address_cells + child_size_cells

    # _slice() splits 'ranges' into entry-sized chunks (erroring out on a
    # size mismatch); each chunk is <child addr> <parent addr> <length>
    for raw_range in _slice(node.parent, "ranges", 4*entry_cells,
                            f"4*(<#address-cells> (= {child_address_cells}) + "
                            "<#address-cells for parent> "
                            f"(= {parent_address_cells}) + "
                            f"<#size-cells> (= {child_size_cells}))"):
        # Peel the child address off the front of the entry...
        child_addr = to_num(raw_range[:4*child_address_cells])
        raw_range = raw_range[4*child_address_cells:]

        # ...then the parent address...
        parent_addr = to_num(raw_range[:4*parent_address_cells])
        raw_range = raw_range[4*parent_address_cells:]

        # ...leaving just the length of the range
        child_len = to_num(raw_range)

        if child_addr <= addr < child_addr + child_len:
            # 'addr' is within range of a translation in 'ranges'. Recursively
            # translate it and return the result.
            return _translate(parent_addr + addr - child_addr, node.parent)

    # 'addr' is not within range of any translation in 'ranges'
    return addr
2700
2701
def _add_names(node: dtlib_Node, names_ident: str, objs: Any) -> None:
    # Sets the .name field on each object in 'objs' from the
    # '<names_ident>-names' property on 'node' (e.g. "reg-names" for
    # names_ident "reg"). If that property is missing, every name is set
    # to None instead. None entries in 'objs' are left alone.

    full_names_ident = f"{names_ident}-names"

    if full_names_ident not in node.props:
        # No names property: clear all names
        for obj in objs:
            if obj is not None:
                obj.name = None
        return

    names = node.props[full_names_ident].to_strings()
    if len(names) != len(objs):
        _err(f"{full_names_ident} property in {node.path} "
             f"in {node.dt.filename} has {len(names)} strings, "
             f"expected {len(objs)} strings")

    for obj, name in zip(objs, names):
        if obj is not None:
            obj.name = name
2731
2732
def _interrupt_parent(start_node: dtlib_Node) -> dtlib_Node:
    # Walks up from 'start_node' and returns the node referenced by the
    # closest 'interrupt-parent' property. As of writing, this inherited
    # lookup isn't specified in the DT spec., but it seems to match what
    # some .dts files expect.

    ancestor: Optional[dtlib_Node] = start_node

    while ancestor:
        iparent_prop = ancestor.props.get("interrupt-parent")
        if iparent_prop is not None:
            return iparent_prop.to_node()
        ancestor = ancestor.parent

    _err(f"{start_node!r} has an 'interrupts' property, but neither the node "
         "nor any of its parents has an 'interrupt-parent' property")
2747
2748
def _interrupts(node: dtlib_Node) -> List[Tuple[dtlib_Node, bytes]]:
    # Returns one (<controller>, <data>) tuple per interrupt generated by
    # 'node'. <controller> is the final destination after mapping the
    # interrupt through any 'interrupt-map', and <data> the raw interrupt
    # cells as a 'bytes' object.

    # 'interrupts-extended' wins over 'interrupts' when both are present
    if "interrupts-extended" in node.props:
        prop = node.props["interrupts-extended"]

        mapped: List[Tuple[dtlib_Node, bytes]] = []
        for entry in _phandle_val_list(prop, "interrupt"):
            if entry is None:
                _err(f"node '{node.path}' interrupts-extended property "
                     "has an empty element")
            iparent, spec = entry
            mapped.append(_map_interrupt(node, iparent, spec))
        return mapped

    if "interrupts" not in node.props:
        return []

    # 'interrupts' behaves like 'interrupts-extended' with a single,
    # inherited interrupt parent shared by every entry

    iparent = _interrupt_parent(node)
    interrupt_cells = _interrupt_cells(iparent)

    return [_map_interrupt(node, iparent, raw)
            for raw in _slice(node, "interrupts", 4*interrupt_cells,
                              "4*<#interrupt-cells>")]
2780
2781
def _map_interrupt(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> Tuple[dtlib_Node, bytes]:
    # Translates an interrupt headed from 'child' to 'parent' with data
    # 'child_spec' through any 'interrupt-map' properties. Returns a
    # (<controller>, <data>) tuple with the final destination after mapping.

    if "interrupt-controller" in parent.props:
        # 'parent' is already the interrupt controller; nothing to map
        return (parent, child_spec)

    def own_address_cells(node):
        # Used for parents pointed at by 'interrupt-map'. We can't use
        # _address_cells(), because it's the #address-cells property on 'node'
        # itself that matters.

        address_cells = node.props.get("#address-cells")
        if not address_cells:
            _err(f"missing #address-cells on {node!r} "
                 "(while handling interrupt-map)")
        return address_cells.to_num()

    def spec_len_fn(node):
        # Can't use _address_cells() here, because it's the #address-cells
        # property on 'node' itself that matters
        return own_address_cells(node) + _interrupt_cells(node)

    # 'interrupt-map' rows match on <child unit address, child spec>, so
    # the unit address (from 'reg') is prepended before mapping
    parent, raw_spec = _map(
        "interrupt", child, parent, _raw_unit_addr(child) + child_spec,
        spec_len_fn, require_controller=True)

    # Strip the parent unit address part, if any
    return (parent, raw_spec[4*own_address_cells(parent):])
2816
2817
def _map_phandle_array_entry(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        basename: str
) -> Tuple[dtlib_Node, bytes]:
    # Returns a (<controller>, <data>) tuple with the final destination of
    # a phandle-array entry after mapping it through any '<basename>-map'
    # (e.g. gpio-map) properties. See _map_interrupt().

    def spec_len_fn(node):
        # The parent specifier length comes from '#<basename>-cells'
        prop_name = f"#{basename}-cells"
        cells_prop = node.props.get(prop_name)
        if cells_prop is None:
            _err(f"expected '{prop_name}' property on {node!r} "
                 f"(referenced by {child!r})")
        return cells_prop.to_num()

    # Do not require <prefix>-controller for anything but interrupts for now
    return _map(basename, child, parent, child_spec, spec_len_fn,
                require_controller=False)
2838
2839
def _map(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        spec_len_fn: Callable[[dtlib_Node], int],
        require_controller: bool
) -> Tuple[dtlib_Node, bytes]:
    # Common code for mapping through <prefix>-map properties, e.g.
    # interrupt-map and gpio-map.
    #
    # prefix:
    #   The prefix, e.g. "interrupt" or "gpio"
    #
    # child:
    #   The "sender", e.g. the node with 'interrupts = <...>'
    #
    # parent:
    #   The "receiver", e.g. a node with 'interrupt-map = <...>' or
    #   'interrupt-controller' (no mapping)
    #
    # child_spec:
    #   The data associated with the interrupt/GPIO/etc., as a 'bytes' object,
    #   e.g. <1 2> for 'foo-gpios = <&gpio1 1 2>'.
    #
    # spec_len_fn:
    #   Function called on a parent specified in a *-map property to get the
    #   length of the parent specifier (data after phandle in *-map), in cells
    #
    # require_controller:
    #   If True, the final controller node after mapping is required to have
    #   to have a <prefix>-controller property.

    map_prop = parent.props.get(prefix + "-map")
    if not map_prop:
        if require_controller and prefix + "-controller" not in parent.props:
            _err(f"expected '{prefix}-controller' property on {parent!r} "
                 f"(referenced by {child!r})")

        # No mapping
        return (parent, child_spec)

    # <prefix>-map-mask can mark bits of the child specifier as
    # don't-care before matching against the map entries below
    masked_child_spec = _mask(prefix, child, parent, child_spec)

    # Consume the raw property value one <child spec, phandle, parent
    # spec> row at a time; each row's parent spec length depends on the
    # node the phandle points at
    raw = map_prop.value
    while raw:
        if len(raw) < len(child_spec):
            _err(f"bad value for {map_prop!r}, missing/truncated child data")
        child_spec_entry = raw[:len(child_spec)]
        raw = raw[len(child_spec):]

        if len(raw) < 4:
            _err(f"bad value for {map_prop!r}, missing/truncated phandle")
        phandle = to_num(raw[:4])
        raw = raw[4:]

        # Parent specified in *-map
        map_parent = parent.dt.phandle2node.get(phandle)
        if not map_parent:
            _err(f"bad phandle ({phandle}) in {map_prop!r}")

        map_parent_spec_len = 4*spec_len_fn(map_parent)
        if len(raw) < map_parent_spec_len:
            _err(f"bad value for {map_prop!r}, missing/truncated parent data")
        parent_spec = raw[:map_parent_spec_len]
        raw = raw[map_parent_spec_len:]

        # Got one *-map row. Check if it matches the child data.
        if child_spec_entry == masked_child_spec:
            # Handle *-map-pass-thru
            parent_spec = _pass_thru(
                prefix, child, parent, child_spec, parent_spec)

            # Found match. Recursively map and return it.
            return _map(prefix, parent, map_parent, parent_spec, spec_len_fn,
                        require_controller)

    # No row matched: the map is incomplete for this child specifier
    _err(f"child specifier for {child!r} ({child_spec!r}) "
         f"does not appear in {map_prop!r}")
2919
2920
def _mask(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-mask properties, e.g.
    # interrupt-map-mask. Returns 'child_spec' ANDed with the mask (or
    # unchanged if 'parent' has no mask property). See _map() for the
    # parameters.

    mask_prop = parent.props.get(prefix + "-map-mask")
    if not mask_prop:
        # No mask
        return child_spec

    mask = mask_prop.value
    if len(mask) != len(child_spec):
        # The property read above is <prefix>-map-mask, so name it
        # correctly in the error message
        _err(f"{child!r}: expected '{prefix}-map-mask' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(mask)} bytes")

    return _and(child_spec, mask)
2941
2942
def _pass_thru(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        parent_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-pass-thru properties, e.g.
    # interrupt-map-pass-thru.
    #
    # parent_spec:
    #   The parent data from the matched entry in the <prefix>-map property
    #
    # See _map() for the other parameters.

    pass_thru_prop = parent.props.get(prefix + "-map-pass-thru")
    if not pass_thru_prop:
        # No pass-thru
        return parent_spec

    pass_thru = pass_thru_prop.value
    if len(pass_thru) != len(child_spec):
        _err(f"{child!r}: expected '{prefix}-map-pass-thru' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(pass_thru)} bytes")

    # Where a pass-thru bit is set, take the child's bit; elsewhere keep
    # the parent's bit from the map entry
    merged = _or(_and(child_spec, pass_thru),
                 _and(parent_spec, _not(pass_thru)))

    # _and()/_or() pad on the left, so trim back to the parent spec length
    return merged[-len(parent_spec):]
2973
2974
def _raw_unit_addr(node: dtlib_Node) -> bytes:
    # _map_interrupt() helper. Returns the first #address-cells cells of
    # 'node's 'reg' property -- its unit address -- as a raw 'bytes'.

    reg = node.props.get('reg')
    if reg is None:
        _err(f"{node!r} lacks 'reg' property "
             "(needed for 'interrupt-map' unit address lookup)")

    addr_len = 4*_address_cells(node)

    if len(reg.value) < addr_len:
        _err(f"{node!r} has too short 'reg' property "
             "(while doing 'interrupt-map' unit address lookup)")

    return reg.value[:addr_len]
2990
2991
2992def _and(b1: bytes, b2: bytes) -> bytes:
2993    # Returns the bitwise AND of the two 'bytes' objects b1 and b2. Pads
2994    # with ones on the left if the lengths are not equal.
2995
2996    # Pad on the left, to equal length
2997    maxlen = max(len(b1), len(b2))
2998    return bytes(x & y for x, y in zip(b1.rjust(maxlen, b'\xff'),
2999                                       b2.rjust(maxlen, b'\xff')))
3000
3001
3002def _or(b1: bytes, b2: bytes) -> bytes:
3003    # Returns the bitwise OR of the two 'bytes' objects b1 and b2. Pads with
3004    # zeros on the left if the lengths are not equal.
3005
3006    # Pad on the left, to equal length
3007    maxlen = max(len(b1), len(b2))
3008    return bytes(x | y for x, y in zip(b1.rjust(maxlen, b'\x00'),
3009                                       b2.rjust(maxlen, b'\x00')))
3010
3011
3012def _not(b: bytes) -> bytes:
3013    # Returns the bitwise not of the 'bytes' object 'b'
3014
3015    # ANDing with 0xFF avoids negative numbers
3016    return bytes(~x & 0xFF for x in b)
3017
3018
def _phandle_val_list(
        prop: dtlib_Property,
        n_cells_name: str
) -> List[Optional[Tuple[dtlib_Node, bytes]]]:
    # Parses a '<phandle> <value> <phandle> <value> ...' value. The number of
    # cells that make up each <value> is derived from the node pointed at by
    # the preceding <phandle>.
    #
    # prop:
    #   dtlib.Property with value to parse
    #
    # n_cells_name:
    #   The <name> part of the #<name>-cells property to look for on the nodes
    #   the phandles point to, e.g. "gpio" for #gpio-cells.
    #
    # Each tuple in the return value is a (<node>, <value>) pair, where <node>
    # is the node pointed at by <phandle>. If <phandle> does not refer
    # to a node, the entire list element is None.

    full_n_cells_name = f"#{n_cells_name}-cells"

    res: List[Optional[Tuple[dtlib_Node, bytes]]] = []

    # Consume the raw value one <phandle, value> pair at a time; the
    # value length can differ between pairs, so this can't be a fixed
    # slicing
    raw = prop.value
    while raw:
        if len(raw) < 4:
            # Not enough room for phandle
            _err("bad value for " + repr(prop))
        phandle = to_num(raw[:4])
        raw = raw[4:]

        node = prop.node.dt.phandle2node.get(phandle)
        if not node:
            # Unspecified phandle-array element. This is valid; a 0
            # phandle value followed by no cells is an empty element.
            res.append(None)
            continue

        if full_n_cells_name not in node.props:
            _err(f"{node!r} lacks {full_n_cells_name}")

        # The pointed-to node determines how many value cells follow
        n_cells = node.props[full_n_cells_name].to_num()
        if len(raw) < 4*n_cells:
            _err("missing data after phandle in " + repr(prop))

        res.append((node, raw[:4*n_cells]))
        raw = raw[4*n_cells:]

    return res
3068
3069
def _address_cells(node: dtlib_Node) -> int:
    # Returns the number of <u32> cells used to encode the address part
    # of 'node's 'reg' entries, taken from the parent's #address-cells
    if TYPE_CHECKING:
        assert node.parent

    cells_prop = node.parent.props.get("#address-cells")
    if cells_prop is None:
        return 2  # Default value per DT spec.
    return cells_prop.to_num()
3079
3080
def _size_cells(node: dtlib_Node) -> int:
    # Returns the number of <u32> cells used to encode the size part of
    # 'node's 'reg' entries, taken from the parent's #size-cells
    if TYPE_CHECKING:
        assert node.parent

    cells_prop = node.parent.props.get("#size-cells")
    if cells_prop is None:
        return 1  # Default value per DT spec.
    return cells_prop.to_num()
3090
3091
def _interrupt_cells(node: dtlib_Node) -> int:
    # Returns 'node's #interrupt-cells value. Unlike #address-cells and
    # #size-cells there is no default: a missing property is an error.

    cells_prop = node.props.get("#interrupt-cells")
    if cells_prop is None:
        _err(f"{node!r} lacks #interrupt-cells")
    return cells_prop.to_num()
3099
3100
def _slice(node: dtlib_Node,
           prop_name: str,
           size: int,
           size_hint: str) -> List[bytes]:
    # Splits the value of property 'prop_name' on 'node' into 'size'-byte
    # chunks via _slice_helper(), raising EDTError on a length mismatch.
    # 'size_hint' is a human-readable description of 'size' for the error
    # message.
    return _slice_helper(node, prop_name, size, size_hint, EDTError)
3106
3107
def _check_dt(dt: DT) -> None:
    # edtlib-level sanity checks on the devicetree, beyond what dtlib
    # enforces: every 'status' must have a value from the devicetree
    # spec., and every 'ranges' must be empty or numeric.

    # "ok" is accepted on top of the spec. values, for backwards
    # compatibility
    ok_status = {"ok", "okay", "disabled", "reserved", "fail", "fail-sss"}

    for node in dt.node_iter():
        if "status" in node.props:
            try:
                status_val = node.props["status"].to_string()
            except DTError as e:
                # The error message gives the path
                _err(str(e))

            if status_val not in ok_status:
                _err(f"unknown 'status' value \"{status_val}\" in {node.path} "
                     f"in {node.dt.filename}, expected one of " +
                     ", ".join(ok_status) +
                     " (see the devicetree specification)")

        ranges_prop = node.props.get("ranges")
        if ranges_prop and ranges_prop.type not in (Type.EMPTY, Type.NUMS):
            _err(f"expected 'ranges = < ... >;' in {node.path} in "
                 f"{node.dt.filename}, not '{ranges_prop}' "
                 "(see the devicetree specification)")
3138
3139
def _err(msg) -> NoReturn:
    # Raises an EDTError carrying 'msg'. Central error helper; annotated
    # NoReturn so type checkers know control never returns to the caller.
    raise EDTError(msg)
3142
# Module-level logging object, named after this module per the usual
# logging convention
_LOG = logging.getLogger(__name__)
3145
# Regular expression for non-alphanumeric-or-underscore characters.
# re.ASCII keeps \W from matching non-ASCII letters and digits.
_NOT_ALPHANUM_OR_UNDERSCORE = re.compile(r'\W', re.ASCII)


def str_as_token(val: str) -> str:
    """Return a canonical representation of a string as a C token.

    Every character of 'val' that is not an ASCII letter, digit, or
    underscore is replaced with an underscore."""

    return _NOT_ALPHANUM_OR_UNDERSCORE.sub('_', val)
3157
3158
# Custom PyYAML binding loader class to avoid modifying yaml.Loader directly,
# which could interfere with YAML loading in clients
class _BindingLoader(Loader):
    pass


# Add legacy '!include foo.yaml' handling to the binding loader only;
# _binding_include() turns the tag's argument into a list of file names
_BindingLoader.add_constructor("!include", _binding_include)
3167
3168#
3169# "Default" binding for properties which are defined by the spec.
3170#
3171# Zephyr: do not change the _DEFAULT_PROP_TYPES keys without
3172# updating the documentation for the DT_PROP() macro in
3173# include/devicetree.h.
3174#
3175
3176_DEFAULT_PROP_TYPES: Dict[str, str] = {
3177    "compatible": "string-array",
3178    "status": "string",
3179    "reg": "array",
3180    "reg-names": "string-array",
3181    "label": "string",
3182    "interrupts": "array",
3183    "interrupts-extended": "compound",
3184    "interrupt-names": "string-array",
3185    "interrupt-controller": "boolean",
3186}
3187
3188_STATUS_ENUM: List[str] = "ok okay disabled reserved fail fail-sss".split()
3189
3190def _raw_default_property_for(
3191        name: str
3192) -> Dict[str, Union[str, bool, List[str]]]:
3193    ret: Dict[str, Union[str, bool, List[str]]] = {
3194        'type': _DEFAULT_PROP_TYPES[name],
3195        'required': False,
3196    }
3197    if name == 'status':
3198        ret['enum'] = _STATUS_ENUM
3199    return ret
3200
# Binding for the spec-defined default properties, built from the raw
# fragments above. It is synthetic (not loaded from a file), so no
# compatible or description is required.
_DEFAULT_PROP_BINDING: Binding = Binding(
    None, {},
    raw={
        'properties': {
            name: _raw_default_property_for(name)
            for name in _DEFAULT_PROP_TYPES
        },
    },
    require_compatible=False, require_description=False,
)
3211
# One PropertySpec per spec-defined default property, all backed by
# _DEFAULT_PROP_BINDING
_DEFAULT_PROP_SPECS: Dict[str, PropertySpec] = {
    name: PropertySpec(name, _DEFAULT_PROP_BINDING)
    for name in _DEFAULT_PROP_TYPES
}
3216
3217# A set of vendor prefixes which are grandfathered in by Linux,
3218# and therefore by us as well.
3219_VENDOR_PREFIX_ALLOWED: Set[str] = set([
3220    "at25", "bm", "devbus", "dmacap", "dsa",
3221    "exynos", "fsia", "fsib", "gpio-fan", "gpio-key", "gpio", "gpmc",
3222    "hdmi", "i2c-gpio", "keypad", "m25p", "max8952", "max8997",
3223    "max8998", "mpmc", "pinctrl-single", "#pinctrl-single", "PowerPC",
3224    "pl022", "pxa-mmc", "rcar_sound", "rotary-encoder", "s5m8767",
3225    "sdhci", "simple-audio-card", "st-plgpio", "st-spics", "ts",
3226])
3227