1# Copyright (c) 2019 Nordic Semiconductor ASA
2# Copyright (c) 2019 Linaro Limited
3# SPDX-License-Identifier: BSD-3-Clause
4
5# Tip: You can view just the documentation with 'pydoc3 devicetree.edtlib'
6
7"""
8Library for working with devicetrees at a higher level compared to dtlib. Like
9dtlib, this library presents a tree of devicetree nodes, but the nodes are
10augmented with information from bindings and include some interpretation of
11properties. Some of this interpretation is based on conventions established
12by the Linux kernel, so the Documentation/devicetree/bindings in the Linux
13source code is sometimes good reference material.
14
15Bindings are YAML files that describe devicetree nodes. Devicetree
16nodes are usually mapped to bindings via their 'compatible = "..."' property,
17but a binding can also come from a 'child-binding:' key in the binding for the
18parent devicetree node.
19
20Each devicetree node (dtlib.Node) gets a corresponding edtlib.Node instance,
21which has all the information related to the node.
22
23The top-level entry points for the library are the EDT and Binding classes.
24See their constructor docstrings for details. There is also a
25bindings_from_paths() helper function.
26"""
27
28# NOTE: tests/test_edtlib.py is the test suite for this library.
29
30# Implementation notes
31# --------------------
32#
33# A '_' prefix on an identifier in Python is a convention for marking it private.
34# Please do not access private things. Instead, think of what API you need, and
35# add it.
36#
37# This module is not meant to have any global state. It should be possible to
38# create several EDT objects with independent binding paths and flags. If you
39# need to add a configuration parameter or the like, store it in the EDT
40# instance, and initialize it e.g. with a constructor argument.
41#
42# This library is layered on top of dtlib, and is not meant to expose it to
43# clients. This keeps the header generation script simple.
44#
45# General biased advice:
46#
47# - Consider using @property for APIs that don't need parameters. It makes
48#   functions look like attributes, which is less awkward in clients, and makes
49#   it easy to switch back and forth between variables and functions.
50#
51# - Think about the data type of the thing you're exposing. Exposing something
52#   as e.g. a list or a dictionary is often nicer and more flexible than adding
53#   a function.
54#
55# - Avoid get_*() prefixes on functions. Name them after the thing they return
56#   instead. This often makes the code read more naturally in callers.
57#
58#   Also, consider using @property instead of get_*().
59#
60# - Don't expose dtlib stuff directly.
61#
62# - Add documentation for any new APIs you add.
63#
64#   The convention here is that docstrings (quoted strings) are used for public
65#   APIs, and "doc comments" for internal functions.
66#
67#   @properties are documented in the class docstring, as if they were
68#   variables. See the existing @properties for a template.
69
70from collections import defaultdict
71from copy import deepcopy
72from dataclasses import dataclass
73from typing import Any, Callable, Dict, Iterable, List, NoReturn, \
74    Optional, Set, TYPE_CHECKING, Tuple, Union
75import logging
76import os
77import re
78
79import yaml
80try:
81    # Use the C LibYAML parser if available, rather than the Python parser.
82    # This makes e.g. gen_defines.py more than twice as fast.
83    from yaml import CLoader as Loader
84except ImportError:
85    from yaml import Loader     # type: ignore
86
87from devicetree.dtlib import DT, DTError, to_num, to_nums, Type
88from devicetree.dtlib import Node as dtlib_Node
89from devicetree.dtlib import Property as dtlib_Property
90from devicetree.grutils import Graph
91from devicetree._private import _slice_helper
92
93#
94# Public classes
95#
96
97
class Binding:
    """
    Represents a parsed binding.

    These attributes are available on Binding objects:

    path:
      The absolute path to the file defining the binding.

    description:
      The free-form description of the binding, or None.

    compatible:
      The compatible string the binding matches.

      This may be None. For example, it's None when the Binding is inferred
      from node properties. It can also be None for Binding objects created
      using 'child-binding:' with no compatible.

    prop2specs:
      A dict mapping property names to PropertySpec objects
      describing those properties' values.

    specifier2cells:
      A dict that maps specifier space names (like "gpio",
      "clock", "pwm", etc.) to lists of cell names.

      For example, if the binding YAML contains 'pin' and 'flags' cell names
      for the 'gpio' specifier space, like this:

          gpio-cells:
          - pin
          - flags

      Then the Binding object will have a 'specifier2cells' attribute mapping
      "gpio" to ["pin", "flags"]. A missing key should be interpreted as zero
      cells.

    raw:
      The binding as an object parsed from YAML.

    bus:
      If nodes with this binding's 'compatible' describe a bus, a string
      describing the bus type (like "i2c") or a list describing supported
      protocols (like ["i3c", "i2c"]). None otherwise.

      Note that this is the raw value from the binding where it can be
      a string or a list. Use "buses" instead unless you need the raw
      value, where "buses" is always a list.

    buses:
      Derived property from 'bus' where 'buses' is a list of bus(es),
      for example, ["i2c"] or ["i3c", "i2c"]. Or an empty list if there is
      no 'bus:' in this binding.

    on_bus:
      If nodes with this binding's 'compatible' appear on a bus, a string
      describing the bus type (like "i2c"). None otherwise.

    child_binding:
      If this binding describes the properties of child nodes, then
      this is a Binding object for those children; it is None otherwise.
      A Binding object's 'child_binding.child_binding' is not None if there
      are multiple levels of 'child-binding' descriptions in the binding.
    """

    def __init__(self, path: Optional[str], fname2path: Dict[str, str],
                 raw: Any = None, require_compatible: bool = True,
                 require_description: bool = True):
        """
        Binding constructor.

        path:
          Path to binding YAML file. May be None.

        fname2path:
          Map from include files to their absolute paths. Must
          not be None, but may be empty.

        raw:
          Optional raw content in the binding.
          This does not have to have any "include:" lines resolved.
          May be left out, in which case 'path' is opened and read.
          This can be used to resolve child bindings, for example.

        require_compatible:
          If True, it is an error if the binding does not contain a
          "compatible:" line. If False, a missing "compatible:" is
          not an error. Either way, "compatible:" must be a string
          if it is present in the binding.

        require_description:
          If True, it is an error if the binding does not contain a
          "description:" line. If False, a missing "description:" is
          not an error. Either way, "description:" must be a string
          if it is present in the binding.
        """
        self.path: Optional[str] = path
        self._fname2path: Dict[str, str] = fname2path

        if raw is None:
            if path is None:
                _err("you must provide either a 'path' or a 'raw' argument")
            with open(path, encoding="utf-8") as f:
                raw = yaml.load(f, Loader=_BindingLoader)

        # Merge any included files into self.raw. This also pulls in
        # inherited child binding definitions, so it has to be done
        # before initializing those. _merge_includes() mutates and
        # returns 'raw', so 'raw' and 'self.raw' refer to the same
        # object below.
        self.raw: dict = self._merge_includes(raw, self.path)

        # Recursively initialize any child bindings. These don't
        # require a 'compatible' or 'description' to be well defined,
        # but they must be dicts.
        if "child-binding" in raw:
            if not isinstance(raw["child-binding"], dict):
                _err(f"malformed 'child-binding:' in {self.path}, "
                     "expected a binding (dictionary with keys/values)")
            self.child_binding: Optional['Binding'] = Binding(
                path, fname2path,
                raw=raw["child-binding"],
                require_compatible=False,
                require_description=False)
        else:
            self.child_binding = None

        # Make sure this is a well defined object.
        self._check(require_compatible, require_description)

        # Initialize look up tables.
        self.prop2specs: Dict[str, 'PropertySpec'] = {}
        for prop_name in self.raw.get("properties", {}):
            self.prop2specs[prop_name] = PropertySpec(prop_name, self)
        self.specifier2cells: Dict[str, List[str]] = {}
        for key, val in self.raw.items():
            if key.endswith("-cells"):
                self.specifier2cells[key[:-len("-cells")]] = val

    def __repr__(self) -> str:
        if self.compatible:
            compat = f" for compatible '{self.compatible}'"
        else:
            compat = ""
        basename = os.path.basename(self.path or "")
        return f"<Binding {basename}{compat}>"

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('description')

    @property
    def compatible(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('compatible')

    @property
    def bus(self) -> Union[None, str, List[str]]:
        "See the class docstring"
        return self.raw.get('bus')

    @property
    def buses(self) -> List[str]:
        "See the class docstring"
        if self.raw.get('bus') is not None:
            # self._buses is initialized by _check() whenever 'bus:' is
            # present in the binding.
            return self._buses
        else:
            return []

    @property
    def on_bus(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('on-bus')

    def _merge_includes(self, raw: dict, binding_path: Optional[str]) -> dict:
        # Constructor helper. Merges included files in
        # 'raw["include"]' into 'raw' using 'self._fname2path' as a
        # source of include files, removing the "include" key while
        # doing so.
        #
        # This treats 'binding_path' as the binding file being built up
        # and uses it for error messages.

        if "include" not in raw:
            return raw

        include = raw.pop("include")

        # First, merge the included files together. If more than one included
        # file has a 'required:' for a particular property, OR the values
        # together, so that 'required: true' wins.

        merged: Dict[str, Any] = {}

        if isinstance(include, str):
            # Simple scalar string case
            _merge_props(merged, self._load_raw(include), None, binding_path,
                         False)
        elif isinstance(include, list):
            # List of strings and maps. These types may be intermixed.
            for elem in include:
                if isinstance(elem, str):
                    _merge_props(merged, self._load_raw(elem), None,
                                 binding_path, False)
                elif isinstance(elem, dict):
                    name = elem.pop('name', None)
                    allowlist = elem.pop('property-allowlist', None)
                    blocklist = elem.pop('property-blocklist', None)
                    child_filter = elem.pop('child-binding', None)

                    if elem:
                        # We've popped out all the valid keys.
                        _err(f"'include:' in {binding_path} should not have "
                             f"these unexpected contents: {elem}")

                    _check_include_dict(name, allowlist, blocklist,
                                        child_filter, binding_path)

                    contents = self._load_raw(name)

                    _filter_properties(contents, allowlist, blocklist,
                                       child_filter, binding_path)
                    _merge_props(merged, contents, None, binding_path, False)
                else:
                    _err(f"all elements in 'include:' in {binding_path} "
                         "should be either strings or maps with a 'name' key "
                         "and optional 'property-allowlist' or "
                         f"'property-blocklist' keys, but got: {elem}")
        else:
            # Invalid item.
            _err(f"'include:' in {binding_path} "
                 f"should be a string or list, but has type {type(include)}")

        # Next, merge the merged included files into 'raw'. Error out if
        # 'raw' has 'required: false' while the merged included files have
        # 'required: true'.

        _merge_props(raw, merged, None, binding_path, check_required=True)

        return raw

    def _load_raw(self, fname: str) -> dict:
        # Returns the contents of the binding given by 'fname' after merging
        # any bindings it lists in 'include:' into it. 'fname' is just the
        # basename of the file, so we check that there aren't multiple
        # candidates.

        path = self._fname2path.get(fname)

        if not path:
            _err(f"'{fname}' not found")

        with open(path, encoding="utf-8") as f:
            contents = yaml.load(f, Loader=_BindingLoader)
            if not isinstance(contents, dict):
                _err(f'{path}: invalid contents, expected a mapping')

        return self._merge_includes(contents, path)

    def _check(self, require_compatible: bool, require_description: bool):
        # Does sanity checking on the binding.

        raw = self.raw

        if "compatible" in raw:
            compatible = raw["compatible"]
            if not isinstance(compatible, str):
                _err(f"malformed 'compatible: {compatible}' "
                     f"field in {self.path} - "
                     f"should be a string, not {type(compatible).__name__}")
        elif require_compatible:
            _err(f"missing 'compatible' in {self.path}")

        if "description" in raw:
            description = raw["description"]
            if not isinstance(description, str) or not description:
                _err(f"malformed or empty 'description' in {self.path}")
        elif require_description:
            _err(f"missing 'description' in {self.path}")

        # Allowed top-level keys. The 'include' key should have been
        # removed by _load_raw() already.
        ok_top = {"description", "compatible", "bus", "on-bus",
                  "properties", "child-binding"}

        # Descriptive errors for legacy bindings.
        legacy_errors = {
            "#cells": "expected *-cells syntax",
            "child": "use 'bus: <bus>' instead",
            "child-bus": "use 'bus: <bus>' instead",
            "parent": "use 'on-bus: <bus>' instead",
            "parent-bus": "use 'on-bus: <bus>' instead",
            "sub-node": "use 'child-binding' instead",
            "title": "use 'description' instead",
        }

        for key in raw:
            if key in legacy_errors:
                _err(f"legacy '{key}:' in {self.path}, {legacy_errors[key]}")

            if key not in ok_top and not key.endswith("-cells"):
                # Fix: this second string was missing the f-prefix, so the
                # {', '.join(...)} placeholder was printed literally.
                # sorted() keeps the message deterministic (ok_top is a set).
                _err(f"unknown key '{key}' in {self.path}, "
                     f"expected one of {', '.join(sorted(ok_top))}, "
                     "or *-cells")

        if "bus" in raw:
            bus = raw["bus"]
            # 'bus:' must be either a string or a list of strings. The
            # list subconditions must be joined with 'or': a list with
            # non-string elements is malformed too. (The previous 'and'
            # let e.g. 'bus: [1, 2]' through unchecked.)
            if (not isinstance(bus, str) and
                (not isinstance(bus, list) or
                 not all(isinstance(elem, str) for elem in bus))):
                _err(f"malformed 'bus:' value in {self.path}, "
                     "expected string or list of strings")

            # Normalize for the 'buses' property, which is always a list.
            if isinstance(bus, list):
                self._buses = bus
            else:
                # Convert bus into a list
                self._buses = [bus]

        if "on-bus" in raw and \
           not isinstance(raw["on-bus"], str):
            _err(f"malformed 'on-bus:' value in {self.path}, "
                 "expected string")

        self._check_properties()

        for key, val in raw.items():
            if key.endswith("-cells"):
                if not isinstance(val, list) or \
                   not all(isinstance(elem, str) for elem in val):
                    _err(f"malformed '{key}:' in {self.path}, "
                         "expected a list of strings")

    def _check_properties(self) -> None:
        # _check() helper for checking the contents of 'properties:'.

        raw = self.raw

        if "properties" not in raw:
            return

        ok_prop_keys = {"description", "type", "required",
                        "enum", "const", "default", "deprecated",
                        "specifier-space"}

        for prop_name, options in raw["properties"].items():
            for key in options:
                if key not in ok_prop_keys:
                    _err(f"unknown setting '{key}' in "
                         f"'properties: {prop_name}: ...' in {self.path}, "
                         f"expected one of {', '.join(ok_prop_keys)}")

            _check_prop_by_type(prop_name, options, self.path)

            for true_false_opt in ["required", "deprecated"]:
                if true_false_opt in options:
                    option = options[true_false_opt]
                    if not isinstance(option, bool):
                        _err(f"malformed '{true_false_opt}:' setting '{option}' "
                             f"for '{prop_name}' in 'properties' in {self.path}, "
                             "expected true/false")

            if options.get("deprecated") and options.get("required"):
                _err(f"'{prop_name}' in 'properties' in {self.path} should not "
                      "have both 'deprecated' and 'required' set")

            if "description" in options and \
               not isinstance(options["description"], str):
                _err("missing, malformed, or empty 'description' for "
                     f"'{prop_name}' in 'properties' in {self.path}")

            if "enum" in options and not isinstance(options["enum"], list):
                _err(f"enum in {self.path} for property '{prop_name}' "
                     "is not a list")
471
472
class PropertySpec:
    """
    Represents a "property specification", i.e. the description of a
    property provided by a binding file, like its type and description.

    These attributes are available on PropertySpec objects:

    binding:
      The Binding object which defined this property.

    name:
      The property's name.

    path:
      The file where this property was defined. In case a binding includes
      other bindings, this is the file where the property was last modified.

    type:
      The type of the property as a string, as given in the binding.

    description:
      The free-form description of the property as a string, or None.

    enum:
      A list of values the property may take as given in the binding, or None.

    enum_tokenizable:
      True if enum is not None and all the values in it are tokenizable;
      False otherwise.

      A property must have string type and an "enum:" in its binding to be
      tokenizable. Additionally, the "enum:" values must be unique after
      converting all non-alphanumeric characters to underscores (so "foo bar"
      and "foo_bar" in the same "enum:" would not be tokenizable).

    enum_upper_tokenizable:
      Like 'enum_tokenizable', with the additional restriction that the
      "enum:" values must be unique after uppercasing and converting
      non-alphanumeric characters to underscores.

    const:
      The property's constant value as given in the binding, or None.

    default:
      The property's default value as given in the binding, or None.

    deprecated:
      True if the property is deprecated; False otherwise.

    required:
      True if the property is marked required; False otherwise.

    specifier_space:
      The specifier space for the property as given in the binding, or None.
    """

    def __init__(self, name: str, binding: Binding):
        self.binding: Binding = binding
        self.name: str = name
        self._raw: Dict[str, Any] = self.binding.raw["properties"][name]

    def __repr__(self) -> str:
        return f"<PropertySpec {self.name} type '{self.type}'>"

    @property
    def path(self) -> Optional[str]:
        "See the class docstring"
        return self.binding.path

    @property
    def type(self) -> str:
        "See the class docstring"
        return self._raw["type"]

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("description")

    @property
    def enum(self) -> Optional[list]:
        "See the class docstring"
        return self._raw.get("enum")

    @property
    def enum_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily and cached on first access.
        try:
            return self._enum_tokenizable
        except AttributeError:
            pass

        if self.type != 'string' or self.enum is None:
            self._enum_tokenizable = False
        else:
            # Cache the underscore-converted values in _as_tokens so
            # enum_upper_tokenizable can reuse them.
            self._as_tokens = [
                re.sub(_NOT_ALPHANUM_OR_UNDERSCORE, '_', value)
                for value in self.enum
            ]
            # Tokenizable iff no two values collapse to the same token.
            self._enum_tokenizable = (
                len(self._as_tokens) == len(set(self._as_tokens)))

        return self._enum_tokenizable

    @property
    def enum_upper_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily and cached on first access.
        try:
            return self._enum_upper_tokenizable
        except AttributeError:
            pass

        if not self.enum_tokenizable:
            self._enum_upper_tokenizable = False
        else:
            # Also require uniqueness after uppercasing the tokens.
            uppercased = {token.upper() for token in self._as_tokens}
            self._enum_upper_tokenizable = (
                len(self._as_tokens) == len(uppercased))

        return self._enum_upper_tokenizable

    @property
    def const(self) -> Union[None, int, List[int], str, List[str]]:
        "See the class docstring"
        return self._raw.get("const")

    @property
    def default(self) -> Union[None, int, List[int], str, List[str]]:
        "See the class docstring"
        return self._raw.get("default")

    @property
    def required(self) -> bool:
        "See the class docstring"
        return self._raw.get("required", False)

    @property
    def deprecated(self) -> bool:
        "See the class docstring"
        return self._raw.get("deprecated", False)

    @property
    def specifier_space(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("specifier-space")
610
# Union of all Python representations a devicetree property value can
# take in Property.val. Which variant applies is decided by the 'type:'
# key in the binding (see the Property class docstring): plain scalars
# and lists of them, Node references for phandle/path-like types,
# ControllerAndData entries for phandle-arrays, raw bytes, or None.
PropertyValType = Union[int, str,
                        List[int], List[str],
                        'Node', List['Node'],
                        List[Optional['ControllerAndData']],
                        bytes, None]
616
617
@dataclass
class Property:
    """
    Represents a property on a Node, as set in its DT node and with
    additional info from the 'properties:' section of the binding.

    Only properties mentioned in 'properties:' get created. Properties of type
    'compound' currently do not get Property instances, as it's not clear
    what to generate for them.

    These attributes are available on Property objects. Several are
    just convenience accessors for attributes on the PropertySpec object
    accessible via the 'spec' attribute.

    These attributes are available on Property objects:

    spec:
      The PropertySpec object which specifies this property.

    val:
      The value of the property, with the format determined by spec.type,
      which comes from the 'type:' string in the binding.

        - For 'type: int/array/string/string-array', 'val' is what you'd expect
          (a Python integer or string, or a list of them)

        - For 'type: phandle' and 'type: path', 'val' is the pointed-to Node
          instance

        - For 'type: phandles', 'val' is a list of the pointed-to Node
          instances

        - For 'type: phandle-array', 'val' is a list of ControllerAndData
          instances. See the documentation for that class.

    node:
      The Node instance the property is on

    name:
      Convenience for spec.name.

    description:
      Convenience for spec.description with leading and trailing whitespace
      (including newlines) removed. May be None.

    type:
      Convenience for spec.type.

    val_as_token:
      The value of the property as a token, i.e. with non-alphanumeric
      characters replaced with underscores. This is only safe to access
      if 'spec.enum_tokenizable' returns True.

    enum_index:
      The index of 'val' in 'spec.enum' (which comes from the 'enum:' list
      in the binding), or None if spec.enum is None.
    """

    spec: PropertySpec
    val: PropertyValType
    node: 'Node'

    @property
    def name(self) -> str:
        "See the class docstring"
        return self.spec.name

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        # A missing (None) or empty description is reported as None.
        desc = self.spec.description
        if not desc:
            return None
        return desc.strip()

    @property
    def type(self) -> str:
        "See the class docstring"
        return self.spec.type

    @property
    def val_as_token(self) -> str:
        "See the class docstring"
        # Only meaningful for string values; see spec.enum_tokenizable.
        assert isinstance(self.val, str)
        return str_as_token(self.val)

    @property
    def enum_index(self) -> Optional[int]:
        "See the class docstring"
        enum = self.spec.enum
        if not enum:
            return None
        return enum.index(self.val)
706
707
@dataclass
class Register:
    """
    Represents a register on a node.

    These attributes are available on Register objects:

    node:
      The Node instance this register is from

    name:
      The name of the register as given in the 'reg-names' property, or None if
      there is no 'reg-names' property

    addr:
      The starting address of the register, in the parent address space, or None
      if #address-cells is zero. Any 'ranges' properties are taken into account.

    size:
      The length of the register in bytes, or None if the register has no
      size (the field is Optional[int]; presumably when #size-cells is zero,
      mirroring 'addr' -- confirm against the register-parsing code)
    """

    node: 'Node'
    name: Optional[str]
    addr: Optional[int]
    size: Optional[int]
734
735
@dataclass
class Range:
    """
    Represents a translation range on a node as described by the 'ranges' property.

    Each Range corresponds to one (child-bus-address, parent-bus-address,
    length) entry in the node's 'ranges' property.

    These attributes are available on Range objects:

    node:
      The Node instance this range is from

    child_bus_cells:
      The number of cells used to describe a child bus address.

    child_bus_addr:
      A physical address within the child bus address space, or None if the
      child's #address-cells equals 0.

    parent_bus_cells:
      The number of cells used to describe a parent bus address.

    parent_bus_addr:
      A physical address within the parent bus address space, or None if the
      parent's #address-cells equals 0.

    length_cells:
      The number of cells used to describe the size of range in
      the child's address space.

    length:
      The size of the range in the child address space, or None if the
      child's #size-cells equals 0.
    """
    node: 'Node'
    child_bus_cells: int
    child_bus_addr: Optional[int]
    parent_bus_cells: int
    parent_bus_addr: Optional[int]
    length_cells: int
    length: Optional[int]
775
776
@dataclass
class ControllerAndData:
    """
    Represents an entry in an 'interrupts' or 'type: phandle-array' property
    value, e.g. <&ctrl-1 4 0> in

        cs-gpios = <&ctrl-1 4 0 &ctrl-2 3 4>;

    These attributes are available on ControllerAndData objects:

    node:
      The Node instance the property appears on

    controller:
      The Node instance for the controller (e.g. the controller the interrupt
      gets sent to for interrupts)

    data:
      A dictionary that maps names from the *-cells key in the binding for the
      controller to data values, e.g. {"pin": 4, "flags": 0} for the example
      above.

      'interrupts = <1 2>' might give {"irq": 1, "level": 2}.

    name:
      The name of the entry as given in
      'interrupt-names'/'gpio-names'/'pwm-names'/etc., or None if there is no
      *-names property

    basename:
      Basename for the controller when supporting named cells
      (presumably the specifier-space name, e.g. "gpio" -- confirm where
      instances are constructed)
    """
    node: 'Node'
    controller: 'Node'
    data: dict
    name: Optional[str]
    basename: Optional[str]
814
815
@dataclass
class PinCtrl:
    """
    Represents a pin control configuration for a set of pins on a device,
    e.g. pinctrl-0 or pinctrl-1.

    These attributes are available on PinCtrl objects:

    node:
      The Node instance the pinctrl-* property is on

    name:
      The name of the configuration, as given in pinctrl-names, or None if
      there is no pinctrl-names property

    name_as_token:
      Like 'name', but with non-alphanumeric characters converted to underscores.

    conf_nodes:
      A list of Node instances for the pin configuration nodes, e.g.
      the nodes pointed at by &state_1 and &state_2 in

          pinctrl-0 = <&state_1 &state_2>;
    """

    node: 'Node'
    name: Optional[str]
    conf_nodes: List['Node']

    @property
    def name_as_token(self):
        "See the class docstring"
        # No name means no token either.
        if self.name is None:
            return None
        return str_as_token(self.name)
849
850
851class Node:
852    """
853    Represents a devicetree node, augmented with information from bindings, and
854    with some interpretation of devicetree properties. There's a one-to-one
855    correspondence between devicetree nodes and Nodes.
856
857    These attributes are available on Node objects:
858
859    edt:
860      The EDT instance this node is from
861
862    name:
863      The name of the node
864
865    unit_addr:
866      An integer with the ...@<unit-address> portion of the node name,
867      translated through any 'ranges' properties on parent nodes, or None if
868      the node name has no unit-address portion. PCI devices use a different
869      node name format ...@<dev>,<func> or ...@<dev> (e.g. "pcie@1,0"), in
870      this case None is returned.
871
872    description:
873      The description string from the binding for the node, or None if the node
874      has no binding. Leading and trailing whitespace (including newlines) is
875      removed.
876
877    path:
878      The devicetree path of the node
879
880    label:
881      The text from the 'label' property on the node, or None if the node has
882      no 'label'
883
884    labels:
885      A list of all of the devicetree labels for the node, in the same order
886      as the labels appear, but with duplicates removed.
887
888      This corresponds to the actual devicetree source labels, unlike the
889      "label" attribute, which is the value of a devicetree property named
890      "label".
891
892    parent:
893      The Node instance for the devicetree parent of the Node, or None if the
894      node is the root node
895
896    children:
897      A dictionary with the Node instances for the devicetree children of the
898      node, indexed by name
899
900    dep_ordinal:
901      A non-negative integer value such that the value for a Node is
902      less than the value for all Nodes that depend on it.
903
904      The ordinal is defined for all Nodes, and is unique among nodes in its
905      EDT 'nodes' list.
906
907    required_by:
908      A list with the nodes that directly depend on the node
909
910    depends_on:
911      A list with the nodes that the node directly depends on
912
913    status:
914      The node's status property value, as a string, or "okay" if the node
915      has no status property set. If the node's status property is "ok",
916      it is converted to "okay" for consistency.
917
918    read_only:
919      True if the node has a 'read-only' property, and False otherwise
920
921    matching_compat:
922      The 'compatible' string for the binding that matched the node, or None if
923      the node has no binding
924
925    binding_path:
926      The path to the binding file for the node, or None if the node has no
927      binding
928
929    compats:
930      A list of 'compatible' strings for the node, in the same order that
931      they're listed in the .dts file
932
933    ranges:
934      A list of Range objects extracted from the node's ranges property.
      The list is empty if the node does not have a ranges property.
936
937    regs:
938      A list of Register objects for the node's registers
939
940    props:
941      A dict that maps property names to Property objects.
942      Property objects are created for all devicetree properties on the node
943      that are mentioned in 'properties:' in the binding.
944
945    aliases:
946      A list of aliases for the node. This is fetched from the /aliases node.
947
948    interrupts:
949      A list of ControllerAndData objects for the interrupts generated by the
950      node. The list is empty if the node does not generate interrupts.
951
952    pinctrls:
953      A list of PinCtrl objects for the pinctrl-<index> properties on the
954      node, sorted by index. The list is empty if the node does not have any
955      pinctrl-<index> properties.
956
957    buses:
958      If the node is a bus node (has a 'bus:' key in its binding), then this
959      attribute holds the list of supported bus types, e.g. ["i2c"], ["spi"]
960      or ["i3c", "i2c"] if multiple protocols are supported via the same bus.
961      If the node is not a bus node, then this attribute is an empty list.
962
963    on_buses:
964      The bus the node appears on, e.g. ["i2c"], ["spi"] or ["i3c", "i2c"] if
965      multiple protocols are supported via the same bus. The bus is determined
966      by searching upwards for a parent node whose binding has a 'bus:' key,
967      returning the value of the first 'bus:' key found. If none of the node's
968      parents has a 'bus:' key, this attribute is an empty list.
969
970    bus_node:
      Like on_buses, but contains the Node for the bus controller, or None if the
972      node is not on a bus.
973
974    flash_controller:
975      The flash controller for the node. Only meaningful for nodes representing
976      flash partitions.
977
978    spi_cs_gpio:
979      The device's SPI GPIO chip select as a ControllerAndData instance, if it
980      exists, and None otherwise. See
981      Documentation/devicetree/bindings/spi/spi-controller.yaml in the Linux kernel.
982
983    gpio_hogs:
984      A list of ControllerAndData objects for the GPIOs hogged by the node. The
985      list is empty if the node does not hog any GPIOs. Only relevant for GPIO hog
986      nodes.
987
988    is_pci_device:
989      True if the node is a PCI device.
990    """
991
    def __init__(self,
                 dt_node: dtlib_Node,
                 edt: 'EDT',
                 compats: List[str]):
        '''
        For internal use only; not meant to be used outside edtlib itself.

        dt_node: underlying dtlib.Node this Node wraps
        edt: the EDT instance this node belongs to
        compats: the node's 'compatible' strings, in .dts order
        '''
        # Public, some of which are initialized properly later:
        self.edt: 'EDT' = edt
        self.dep_ordinal: int = -1  # placeholder; real ordinal assigned later
        self.matching_compat: Optional[str] = None  # set by _init_binding()
        self.binding_path: Optional[str] = None     # set by _init_binding()
        self.compats: List[str] = compats
        self.ranges: List[Range] = []               # filled by _init_ranges()
        self.regs: List[Register] = []              # filled by _init_regs()
        self.props: Dict[str, Property] = {}        # filled by _init_props()
        self.interrupts: List[ControllerAndData] = []
        self.pinctrls: List[PinCtrl] = []           # filled by _init_pinctrls()
        self.bus_node: Optional['Node'] = None      # computed via _bus_node()

        # Private, don't touch outside the class:
        self._node: dtlib_Node = dt_node
        self._binding: Optional[Binding] = None     # set by _init_binding()
1015
    @property
    def name(self) -> str:
        "See the class docstring"
        # Same as the underlying dtlib node's name.
        return self._node.name
1020
1021    @property
1022    def unit_addr(self) -> Optional[int]:
1023        "See the class docstring"
1024
1025        # TODO: Return a plain string here later, like dtlib.Node.unit_addr?
1026
1027        # PCI devices use a different node name format (e.g. "pcie@1,0")
1028        if "@" not in self.name or self.is_pci_device:
1029            return None
1030
1031        try:
1032            addr = int(self.name.split("@", 1)[1], 16)
1033        except ValueError:
1034            _err(f"{self!r} has non-hex unit address")
1035
1036        return _translate(addr, self._node)
1037
1038    @property
1039    def description(self) -> Optional[str]:
1040        "See the class docstring."
1041        if self._binding:
1042            return self._binding.description
1043        return None
1044
1045    @property
1046    def path(self) ->  str:
1047        "See the class docstring"
1048        return self._node.path
1049
1050    @property
1051    def label(self) -> Optional[str]:
1052        "See the class docstring"
1053        if "label" in self._node.props:
1054            return self._node.props["label"].to_string()
1055        return None
1056
    @property
    def labels(self) -> List[str]:
        "See the class docstring"
        # Devicetree source labels, straight from dtlib.
        return self._node.labels
1061
    @property
    def parent(self) -> Optional['Node']:
        "See the class docstring"
        # The root node has no parent; .get() then returns None.
        return self.edt._node2enode.get(self._node.parent) # type: ignore
1066
1067    @property
1068    def children(self) -> Dict[str, 'Node']:
1069        "See the class docstring"
1070        # Could be initialized statically too to preserve identity, but not
1071        # sure if needed. Parent nodes being initialized before their children
1072        # would need to be kept in mind.
1073        return {name: self.edt._node2enode[node]
1074                for name, node in self._node.nodes.items()}
1075
1076    def child_index(self, node) -> int:
1077        """Get the index of *node* in self.children.
1078        Raises KeyError if the argument is not a child of this node.
1079        """
1080        if not hasattr(self, '_child2index'):
1081            # Defer initialization of this lookup table until this
1082            # method is callable to handle parents needing to be
1083            # initialized before their chidlren. By the time we
1084            # return from __init__, 'self.children' is callable.
1085            self._child2index: Dict[str, int] = {}
1086            for index, child_path in enumerate(child.path for child in
1087                                               self.children.values()):
1088                self._child2index[child_path] = index
1089
1090        return self._child2index[node.path]
1091
    @property
    def required_by(self) -> List['Node']:
        "See the class docstring"
        # Delegates to the EDT-wide dependency graph.
        return self.edt._graph.required_by(self)
1096
    @property
    def depends_on(self) -> List['Node']:
        "See the class docstring"
        # Delegates to the EDT-wide dependency graph.
        return self.edt._graph.depends_on(self)
1101
1102    @property
1103    def status(self) -> str:
1104        "See the class docstring"
1105        status = self._node.props.get("status")
1106
1107        if status is None:
1108            as_string = "okay"
1109        else:
1110            as_string = status.to_string()
1111
1112        if as_string == "ok":
1113            as_string = "okay"
1114
1115        return as_string
1116
    @property
    def read_only(self) -> bool:
        "See the class docstring"
        # Only presence of the property matters; its value is ignored.
        return "read-only" in self._node.props
1121
1122    @property
1123    def aliases(self) -> List[str]:
1124        "See the class docstring"
1125        return [alias for alias, node in self._node.dt.alias2node.items()
1126                if node is self._node]
1127
1128    @property
1129    def buses(self) -> List[str]:
1130        "See the class docstring"
1131        if self._binding:
1132            return self._binding.buses
1133        return []
1134
1135    @property
1136    def on_buses(self) -> List[str]:
1137        "See the class docstring"
1138        bus_node = self.bus_node
1139        return bus_node.buses if bus_node else []
1140
1141    @property
1142    def flash_controller(self) -> 'Node':
1143        "See the class docstring"
1144
1145        # The node path might be something like
1146        # /flash-controller@4001E000/flash@0/partitions/partition@fc000. We go
1147        # up two levels to get the flash and check its compat. The flash
1148        # controller might be the flash itself (for cases like NOR flashes).
1149        # For the case of 'soc-nv-flash', we assume the controller is the
1150        # parent of the flash node.
1151
1152        if not self.parent or not self.parent.parent:
1153            _err(f"flash partition {self!r} lacks parent or grandparent node")
1154
1155        controller = self.parent.parent
1156        if controller.matching_compat == "soc-nv-flash":
1157            if controller.parent is None:
1158                _err(f"flash controller '{controller.path}' cannot be the root node")
1159            return controller.parent
1160        return controller
1161
1162    @property
1163    def spi_cs_gpio(self) -> Optional[ControllerAndData]:
1164        "See the class docstring"
1165
1166        if not ("spi" in self.on_buses
1167                and self.bus_node
1168                and "cs-gpios" in self.bus_node.props):
1169            return None
1170
1171        if not self.regs:
1172            _err(f"{self!r} needs a 'reg' property, to look up the "
1173                 "chip select index for SPI")
1174
1175        parent_cs_lst = self.bus_node.props["cs-gpios"].val
1176        if TYPE_CHECKING:
1177            assert isinstance(parent_cs_lst, list)
1178
1179        # cs-gpios is indexed by the unit address
1180        cs_index = self.regs[0].addr
1181        if TYPE_CHECKING:
1182            assert isinstance(cs_index, int)
1183
1184        if cs_index >= len(parent_cs_lst):
1185            _err(f"index from 'regs' in {self!r} ({cs_index}) "
1186                 "is >= number of cs-gpios in "
1187                 f"{self.bus_node!r} ({len(parent_cs_lst)})")
1188
1189        ret = parent_cs_lst[cs_index]
1190        if TYPE_CHECKING:
1191            assert isinstance(ret, ControllerAndData)
1192        return ret
1193
1194    @property
1195    def gpio_hogs(self) -> List[ControllerAndData]:
1196        "See the class docstring"
1197
1198        if "gpio-hog" not in self.props:
1199            return []
1200
1201        if not self.parent or not "gpio-controller" in self.parent.props:
1202            _err(f"GPIO hog {self!r} lacks parent GPIO controller node")
1203
1204        if not "#gpio-cells" in self.parent._node.props:
1205            _err(f"GPIO hog {self!r} parent node lacks #gpio-cells")
1206
1207        n_cells = self.parent._node.props["#gpio-cells"].to_num()
1208        res = []
1209
1210        for item in _slice(self._node, "gpios", 4*n_cells,
1211                           f"4*(<#gpio-cells> (= {n_cells})"):
1212            controller = self.parent
1213            res.append(ControllerAndData(
1214                node=self, controller=controller,
1215                data=self._named_cells(controller, item, "gpio"),
1216                name=None, basename="gpio"))
1217
1218        return res
1219
    @property
    def is_pci_device(self) -> bool:
        "See the class docstring"
        # A node is considered a PCI device when it sits on a PCIe bus.
        return 'pcie' in self.on_buses
1224
1225    def __repr__(self) -> str:
1226        if self.binding_path:
1227            binding = "binding " + self.binding_path
1228        else:
1229            binding = "no binding"
1230        return f"<Node {self.path} in '{self.edt.dts_path}', {binding}>"
1231
    def _init_binding(self) -> None:
        # Initializes Node.matching_compat, Node._binding, and
        # Node.binding_path.
        #
        # Node._binding holds the data from the node's binding file, in the
        # format returned by PyYAML (plain Python lists, dicts, etc.), or None
        # if the node has no binding.

        # This relies on the parent of the node having already been
        # initialized, which is guaranteed by going through the nodes in
        # node_iter() order.

        if self.path in self.edt._infer_binding_for_paths:
            # The binding is synthesized from the node's properties instead
            # of being looked up via 'compatible'
            self._binding_from_properties()
            return

        if self.compats:
            on_buses = self.on_buses

            for compat in self.compats:
                # When matching, respect the order of the 'compatible' entries,
                # and for each one first try to match against an explicitly
                # specified bus (if any) and then against any bus. This is so
                # that matching against bindings which do not specify a bus
                # works the same way in Zephyr as it does elsewhere.
                binding = None

                for bus in on_buses:
                    if (compat, bus) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, bus]
                        break

                if not binding:
                    if (compat, None) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, None]
                    else:
                        # No binding for this 'compatible'; try the next one
                        continue

                # First match wins
                self.binding_path = binding.path
                self.matching_compat = compat
                self._binding = binding
                return
        else:
            # No 'compatible' property. See if the parent binding has
            # a compatible. This can come from one or more levels of
            # nesting with 'child-binding:'.

            binding_from_parent = self._binding_from_parent()
            if binding_from_parent:
                self._binding = binding_from_parent
                self.binding_path = self._binding.path
                self.matching_compat = self._binding.compatible

                return

        # No binding found
        self._binding = self.binding_path = self.matching_compat = None
1289
1290    def _binding_from_properties(self) -> None:
1291        # Sets up a Binding object synthesized from the properties in the node.
1292
1293        if self.compats:
1294            _err(f"compatible in node with inferred binding: {self.path}")
1295
1296        # Synthesize a 'raw' binding as if it had been parsed from YAML.
1297        raw: Dict[str, Any] = {
1298            'description': 'Inferred binding from properties, via edtlib.',
1299            'properties': {},
1300        }
1301        for name, prop in self._node.props.items():
1302            pp: Dict[str, str] = {}
1303            if prop.type == Type.EMPTY:
1304                pp["type"] = "boolean"
1305            elif prop.type == Type.BYTES:
1306                pp["type"] = "uint8-array"
1307            elif prop.type == Type.NUM:
1308                pp["type"] = "int"
1309            elif prop.type == Type.NUMS:
1310                pp["type"] = "array"
1311            elif prop.type == Type.STRING:
1312                pp["type"] = "string"
1313            elif prop.type == Type.STRINGS:
1314                pp["type"] = "string-array"
1315            elif prop.type == Type.PHANDLE:
1316                pp["type"] = "phandle"
1317            elif prop.type == Type.PHANDLES:
1318                pp["type"] = "phandles"
1319            elif prop.type == Type.PHANDLES_AND_NUMS:
1320                pp["type"] = "phandle-array"
1321            elif prop.type == Type.PATH:
1322                pp["type"] = "path"
1323            else:
1324                _err(f"cannot infer binding from property: {prop} "
1325                     f"with type {prop.type!r}")
1326            raw['properties'][name] = pp
1327
1328        # Set up Node state.
1329        self.binding_path = None
1330        self.matching_compat = None
1331        self.compats = []
1332        self._binding = Binding(None, {}, raw=raw, require_compatible=False)
1333
1334    def _binding_from_parent(self) -> Optional[Binding]:
1335        # Returns the binding from 'child-binding:' in the parent node's
1336        # binding.
1337
1338        if not self.parent:
1339            return None
1340
1341        pbinding = self.parent._binding
1342        if not pbinding:
1343            return None
1344
1345        if pbinding.child_binding:
1346            return pbinding.child_binding
1347
1348        return None
1349
1350    def _bus_node(self, support_fixed_partitions_on_any_bus: bool = True
1351                  ) -> Optional['Node']:
1352        # Returns the value for self.bus_node. Relies on parent nodes being
1353        # initialized before their children.
1354
1355        if not self.parent:
1356            # This is the root node
1357            return None
1358
1359        # Treat 'fixed-partitions' as if they are not on any bus.  The reason is
1360        # that flash nodes might be on a SPI or controller or SoC bus.  Having
1361        # bus be None means we'll always match the binding for fixed-partitions
1362        # also this means want processing the fixed-partitions node we wouldn't
1363        # try to do anything bus specific with it.
1364        if support_fixed_partitions_on_any_bus and "fixed-partitions" in self.compats:
1365            return None
1366
1367        if self.parent.buses:
1368            # The parent node is a bus node
1369            return self.parent
1370
1371        # Same bus node as parent (possibly None)
1372        return self.parent.bus_node
1373
1374    def _init_props(self, default_prop_types: bool = False,
1375                    err_on_deprecated: bool = False) -> None:
1376        # Creates self.props. See the class docstring. Also checks that all
1377        # properties on the node are declared in its binding.
1378
1379        self.props = {}
1380
1381        node = self._node
1382        if self._binding:
1383            prop2specs = self._binding.prop2specs
1384        else:
1385            prop2specs = None
1386
1387        # Initialize self.props
1388        if prop2specs:
1389            for prop_spec in prop2specs.values():
1390                self._init_prop(prop_spec, err_on_deprecated)
1391            self._check_undeclared_props()
1392        elif default_prop_types:
1393            for name in node.props:
1394                if name not in _DEFAULT_PROP_SPECS:
1395                    continue
1396                prop_spec = _DEFAULT_PROP_SPECS[name]
1397                val = self._prop_val(name, prop_spec.type, False, False, None,
1398                                     None, err_on_deprecated)
1399                self.props[name] = Property(prop_spec, val, self)
1400
    def _init_prop(self, prop_spec: PropertySpec,
                   err_on_deprecated: bool) -> None:
        # _init_props() helper for initializing a single property.
        # 'prop_spec' is a PropertySpec object from the node's binding.
        # Validates the value against 'enum:'/'const:' from the binding and
        # stores a Property object in self.props.

        name = prop_spec.name
        prop_type = prop_spec.type
        if not prop_type:
            _err(f"'{name}' in {self.binding_path} lacks 'type'")

        val = self._prop_val(name, prop_type, prop_spec.deprecated,
                             prop_spec.required, prop_spec.default,
                             prop_spec.specifier_space, err_on_deprecated)

        if val is None:
            # 'required: false' property that wasn't there, or a property type
            # for which we store no data.
            return

        # Check the value against 'enum:' in the binding, if present
        enum = prop_spec.enum
        if enum and val not in enum:
            _err(f"value of property '{name}' on {self.path} in "
                 f"{self.edt.dts_path} ({val!r}) is not in 'enum' list in "
                 f"{self.binding_path} ({enum!r})")

        # Check the value against 'const:' in the binding, if present
        const = prop_spec.const
        if const is not None and val != const:
            _err(f"value of property '{name}' on {self.path} in "
                 f"{self.edt.dts_path} ({val!r}) "
                 "is different from the 'const' value specified in "
                 f"{self.binding_path} ({const!r})")

        # Skip properties that start with '#', like '#size-cells', and mapping
        # properties like 'gpio-map'/'interrupt-map'
        if name[0] == "#" or name.endswith("-map"):
            return

        self.props[name] = Property(prop_spec, val, self)
1439
    def _prop_val(self, name: str, prop_type: str,
                  deprecated: bool, required: bool,
                  default: PropertyValType,
                  specifier_space: Optional[str],
                  err_on_deprecated: bool) -> PropertyValType:
        # _init_prop() helper for getting the property's value
        #
        # name:
        #   Property name from binding
        #
        # prop_type:
        #   Property type from binding (a string like "int")
        #
        # deprecated:
        #   True if the property is deprecated
        #
        # required:
        #   True if the property is required to exist
        #
        # default:
        #   Default value to use when the property doesn't exist, or None if
        #   the binding doesn't give a default value
        #
        # specifier_space:
        #   Property specifier-space from binding (if prop_type is "phandle-array")
        #
        # err_on_deprecated:
        #   If True, a deprecated property is an error instead of warning.
        #
        # Returns None for absent optional properties (except booleans,
        # which become False) and for 'compound' properties.

        node = self._node
        prop = node.props.get(name)

        # Warn (or error out) if a deprecated property is actually set
        if prop and deprecated:
            msg = (f"'{name}' is marked as deprecated in 'properties:' "
                   f"in {self.binding_path} for node {node.path}.")
            if err_on_deprecated:
                _err(msg)
            else:
                _LOG.warning(msg)

        if not prop:
            # Missing required properties are only an error on enabled
            # (status "okay") nodes
            if required and self.status == "okay":
                _err(f"'{name}' is marked as required in 'properties:' in "
                     f"{self.binding_path}, but does not appear in {node!r}")

            if default is not None:
                # YAML doesn't have a native format for byte arrays. We need to
                # convert those from an array like [0x12, 0x34, ...]. The
                # format has already been checked in
                # _check_prop_by_type().
                if prop_type == "uint8-array":
                    return bytes(default) # type: ignore
                return default

            return False if prop_type == "boolean" else None

        # The property exists; convert its value based on the binding type

        if prop_type == "boolean":
            if prop.type != Type.EMPTY:
                _err("'{0}' in {1!r} is defined with 'type: boolean' in {2}, "
                     "but is assigned a value ('{3}') instead of being empty "
                     "('{0};')".format(name, node, self.binding_path, prop))
            return True

        if prop_type == "int":
            return prop.to_num()

        if prop_type == "array":
            return prop.to_nums()

        if prop_type == "uint8-array":
            return prop.to_bytes()

        if prop_type == "string":
            return prop.to_string()

        if prop_type == "string-array":
            return prop.to_strings()

        # phandle-valued types are mapped to edtlib Node instances

        if prop_type == "phandle":
            return self.edt._node2enode[prop.to_node()]

        if prop_type == "phandles":
            return [self.edt._node2enode[node] for node in prop.to_nodes()]

        if prop_type == "phandle-array":
            # This type is a bit high-level for dtlib as it involves
            # information from bindings and *-names properties, so there's no
            # to_phandle_array() in dtlib. Do the type check ourselves.
            if prop.type not in (Type.PHANDLE, Type.PHANDLES, Type.PHANDLES_AND_NUMS):
                _err(f"expected property '{name}' in {node.path} in "
                     f"{node.dt.filename} to be assigned "
                     f"with '{name} = < &foo ... &bar 1 ... &baz 2 3 >' "
                     f"(a mix of phandles and numbers), not '{prop}'")

            return self._standard_phandle_val_list(prop, specifier_space)

        if prop_type == "path":
            return self.edt._node2enode[prop.to_path()]

        # prop_type == "compound". Checking that the 'type:'
        # value is valid is done in _check_prop_by_type().
        #
        # 'compound' is a dummy type for properties that don't fit any of the
        # patterns above, so that we can require all entries in 'properties:'
        # to have a 'type: ...'. No Property object is created for it.
        return None
1546
1547    def _check_undeclared_props(self) -> None:
1548        # Checks that all properties are declared in the binding
1549
1550        for prop_name in self._node.props:
1551            # Allow a few special properties to not be declared in the binding
1552            if prop_name.endswith("-controller") or \
1553               prop_name.startswith("#") or \
1554               prop_name in {
1555                   "compatible", "status", "ranges", "phandle",
1556                   "interrupt-parent", "interrupts-extended", "device_type"}:
1557                continue
1558
1559            if TYPE_CHECKING:
1560                assert self._binding
1561
1562            if prop_name not in self._binding.prop2specs:
1563                _err(f"'{prop_name}' appears in {self._node.path} in "
1564                     f"{self.edt.dts_path}, but is not declared in "
1565                     f"'properties:' in {self.binding_path}")
1566
    def _init_ranges(self) -> None:
        # Initializes self.ranges from the node's 'ranges' property, which
        # translates addresses between the child and parent address spaces.
        node = self._node

        self.ranges = []

        if "ranges" not in node.props:
            return

        # Cell counts: the child address size comes from this node's
        # #address-cells, the parent address size from the parent's
        # #address-cells (via _address_cells()), and the length from this
        # node's #size-cells.
        raw_child_address_cells = node.props.get("#address-cells")
        parent_address_cells = _address_cells(node)
        if raw_child_address_cells is None:
            child_address_cells = 2 # Default value per DT spec.
        else:
            child_address_cells = raw_child_address_cells.to_num()
        raw_child_size_cells = node.props.get("#size-cells")
        if raw_child_size_cells is None:
            child_size_cells = 1 # Default value per DT spec.
        else:
            child_size_cells = raw_child_size_cells.to_num()

        # Number of cells for one translation 3-tuple in 'ranges'
        entry_cells = child_address_cells + parent_address_cells + child_size_cells

        if entry_cells == 0:
            # Zero-cell entries only make sense for an empty 'ranges'
            # (identity mapping); anything else is malformed
            if len(node.props["ranges"].value) == 0:
                return
            else:
                _err(f"'ranges' should be empty in {self._node.path} since "
                     f"<#address-cells> = {child_address_cells}, "
                     f"<#address-cells for parent> = {parent_address_cells} and "
                     f"<#size-cells> = {child_size_cells}")

        # Each cell is 4 bytes; split 'ranges' into per-entry chunks
        for raw_range in _slice(node, "ranges", 4*entry_cells,
                                f"4*(<#address-cells> (= {child_address_cells}) + "
                                "<#address-cells for parent> "
                                f"(= {parent_address_cells}) + "
                                f"<#size-cells> (= {child_size_cells}))"):

            # Decode the three fields of the entry; a zero cell count
            # means the field is absent (None)
            child_bus_cells = child_address_cells
            if child_address_cells == 0:
                child_bus_addr = None
            else:
                child_bus_addr = to_num(raw_range[:4*child_address_cells])
            parent_bus_cells = parent_address_cells
            if parent_address_cells == 0:
                parent_bus_addr = None
            else:
                parent_bus_addr = to_num(
                    raw_range[(4*child_address_cells):
                              (4*child_address_cells + 4*parent_address_cells)])
            length_cells = child_size_cells
            if child_size_cells == 0:
                length = None
            else:
                length = to_num(
                    raw_range[(4*child_address_cells + 4*parent_address_cells):])

            self.ranges.append(Range(self, child_bus_cells, child_bus_addr,
                                     parent_bus_cells, parent_bus_addr,
                                     length_cells, length))
1628
    def _init_regs(self) -> None:
        # Initializes self.regs from the node's 'reg' property. Addresses
        # are translated through parent 'ranges' properties; names are
        # filled in afterwards from 'reg-names' via _add_names().

        node = self._node

        self.regs = []

        if "reg" not in node.props:
            return

        # Cell counts come from the parent node, per the DT spec
        address_cells = _address_cells(node)
        size_cells = _size_cells(node)

        # Each cell is 4 bytes; split 'reg' into per-register chunks
        for raw_reg in _slice(node, "reg", 4*(address_cells + size_cells),
                              f"4*(<#address-cells> (= {address_cells}) + "
                              f"<#size-cells> (= {size_cells}))"):
            if address_cells == 0:
                addr = None
            else:
                addr = _translate(to_num(raw_reg[:4*address_cells]), node)
            if size_cells == 0:
                size = None
            else:
                size = to_num(raw_reg[4*address_cells:])
            # Size zero is ok for PCI devices
            if size_cells != 0 and size == 0 and not self.is_pci_device:
                _err(f"zero-sized 'reg' in {self._node!r} seems meaningless "
                     "(maybe you want a size of one or #size-cells = 0 "
                     "instead)")

            # We'll fix up the name when we're done.
            self.regs.append(Register(self, None, addr, size))

        _add_names(node, "reg", self.regs)
1663
1664    def _init_pinctrls(self) -> None:
1665        # Initializes self.pinctrls from any pinctrl-<index> properties
1666
1667        node = self._node
1668
1669        # pinctrl-<index> properties
1670        pinctrl_props = [prop for name, prop in node.props.items()
1671                         if re.match("pinctrl-[0-9]+", name)]
1672        # Sort by index
1673        pinctrl_props.sort(key=lambda prop: prop.name)
1674
1675        # Check indices
1676        for i, prop in enumerate(pinctrl_props):
1677            if prop.name != "pinctrl-" + str(i):
1678                _err(f"missing 'pinctrl-{i}' property on {node!r} "
1679                     "- indices should be contiguous and start from zero")
1680
1681        self.pinctrls = []
1682        for prop in pinctrl_props:
1683            # We'll fix up the names below.
1684            self.pinctrls.append(PinCtrl(
1685                node=self,
1686                name=None,
1687                conf_nodes=[self.edt._node2enode[node]
1688                            for node in prop.to_nodes()]))
1689
1690        _add_names(node, "pinctrl", self.pinctrls)
1691
1692    def _init_interrupts(self) -> None:
1693        # Initializes self.interrupts
1694
1695        node = self._node
1696
1697        self.interrupts = []
1698
1699        for controller_node, data in _interrupts(node):
1700            # We'll fix up the names below.
1701            controller = self.edt._node2enode[controller_node]
1702            self.interrupts.append(ControllerAndData(
1703                node=self, controller=controller,
1704                data=self._named_cells(controller, data, "interrupt"),
1705                name=None, basename=None))
1706
1707        _add_names(node, "interrupt", self.interrupts)
1708
1709    def _standard_phandle_val_list(
1710            self,
1711            prop: dtlib_Property,
1712            specifier_space: Optional[str]
1713    ) -> List[Optional[ControllerAndData]]:
1714        # Parses a property like
1715        #
1716        #     <prop.name> = <phandle cell phandle cell ...>;
1717        #
1718        # where each phandle points to a controller node that has a
1719        #
1720        #     #<specifier_space>-cells = <size>;
1721        #
1722        # property that gives the number of cells in the value after the
1723        # controller's phandle in the property.
1724        #
1725        # E.g. with a property like
1726        #
1727        #     pwms = <&foo 1 2 &bar 3>;
1728        #
1729        # If 'specifier_space' is "pwm", then we should have this elsewhere
1730        # in the tree:
1731        #
1732        #     foo: ... {
1733        #             #pwm-cells = <2>;
1734        #     };
1735        #
1736        #     bar: ... {
1737        #             #pwm-cells = <1>;
1738        #     };
1739        #
1740        # These values can be given names using the <specifier_space>-names:
1741        # list in the binding for the phandle nodes.
1742        #
1743        # Also parses any
1744        #
1745        #     <specifier_space>-names = "...", "...", ...
1746        #
1747        # Returns a list of Optional[ControllerAndData] instances.
1748        #
1749        # An index is None if the underlying phandle-array element is
1750        # unspecified.
1751
1752        if not specifier_space:
1753            if prop.name.endswith("gpios"):
1754                # There's some slight special-casing for *-gpios properties in that
1755                # e.g. foo-gpios still maps to #gpio-cells rather than
1756                # #foo-gpio-cells
1757                specifier_space = "gpio"
1758            else:
1759                # Strip -s. We've already checked that property names end in -s
1760                # if there is no specifier space in _check_prop_by_type().
1761                specifier_space = prop.name[:-1]
1762
1763        res: List[Optional[ControllerAndData]] = []
1764
1765        for item in _phandle_val_list(prop, specifier_space):
1766            if item is None:
1767                res.append(None)
1768                continue
1769
1770            controller_node, data = item
1771            mapped_controller, mapped_data = \
1772                _map_phandle_array_entry(prop.node, controller_node, data,
1773                                         specifier_space)
1774
1775            controller = self.edt._node2enode[mapped_controller]
1776            # We'll fix up the names below.
1777            res.append(ControllerAndData(
1778                node=self, controller=controller,
1779                data=self._named_cells(controller, mapped_data,
1780                                       specifier_space),
1781                name=None, basename=specifier_space))
1782
1783        _add_names(self._node, specifier_space, res)
1784
1785        return res
1786
1787    def _named_cells(
1788            self,
1789            controller: 'Node',
1790            data: bytes,
1791            basename: str
1792    ) -> Dict[str, int]:
1793        # Returns a dictionary that maps <basename>-cells names given in the
1794        # binding for 'controller' to cell values. 'data' is the raw data, as a
1795        # byte array.
1796
1797        if not controller._binding:
1798            _err(f"{basename} controller {controller._node!r} "
1799                 f"for {self._node!r} lacks binding")
1800
1801        if basename in controller._binding.specifier2cells:
1802            cell_names: List[str] = controller._binding.specifier2cells[basename]
1803        else:
1804            # Treat no *-cells in the binding the same as an empty *-cells, so
1805            # that bindings don't have to have e.g. an empty 'clock-cells:' for
1806            # '#clock-cells = <0>'.
1807            cell_names = []
1808
1809        data_list = to_nums(data)
1810        if len(data_list) != len(cell_names):
1811            _err(f"unexpected '{basename}-cells:' length in binding for "
1812                 f"{controller._node!r} - {len(cell_names)} "
1813                 f"instead of {len(data_list)}")
1814
1815        return dict(zip(cell_names, data_list))
1816
1817
class EDT:
    """
    Represents a devicetree augmented with information from bindings.

    These attributes are available on EDT objects:

    nodes:
      A list of Node objects for the nodes that appear in the devicetree

    compat2nodes:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a list of Nodes with that compatible.

    compat2okay:
      Like compat2nodes, but just for nodes with status 'okay'.

    compat2vendor:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a vendor name parsed from vendor_prefixes.

    compat2model:
      A collections.defaultdict that maps each 'compatible' string that appears
      on some Node to a model name parsed from that compatible.

    label2node:
      A dict that maps a node label to the node with that label.

    dep_ord2node:
      A dict that maps an ordinal to the node with that dependency ordinal.

    chosen_nodes:
      A dict that maps the properties defined on the devicetree's /chosen
      node to their values. 'chosen' is indexed by property name (a string),
      and values are converted to Node objects. Note that properties of the
      /chosen node which can't be converted to a Node are not included in
      the value.

    dts_path:
      The .dts path passed to __init__()

    dts_source:
      The final DTS source code of the loaded devicetree after merging nodes
      and processing /delete-node/ and /delete-property/, as a string

    bindings_dirs:
      The bindings directory paths passed to __init__()

    scc_order:
      A list of lists of Nodes. All elements of each list
      depend on each other, and the Nodes in any list do not depend
      on any Node in a subsequent list. Each list defines a Strongly
      Connected Component (SCC) of the graph.

      For an acyclic graph each list will be a singleton. Cycles
      will be represented by lists with multiple nodes. Cycles are
      not expected to be present in devicetree graphs.

    The standard library's pickle module can be used to marshal and
    unmarshal EDT objects.
    """

    def __init__(self,
                 dts: Optional[str],
                 bindings_dirs: List[str],
                 warn_reg_unit_address_mismatch: bool = True,
                 default_prop_types: bool = True,
                 support_fixed_partitions_on_any_bus: bool = True,
                 infer_binding_for_paths: Optional[Iterable[str]] = None,
                 vendor_prefixes: Optional[Dict[str, str]] = None,
                 werror: bool = False):
        """EDT constructor.

        dts:
          Path to devicetree .dts file. Passing None for this value
          is only for internal use; do not do that outside of edtlib.

        bindings_dirs:
          List of paths to directories containing bindings, in YAML format.
          These directories are recursively searched for .yaml files.

        warn_reg_unit_address_mismatch (default: True):
          If True, a warning is logged if a node has a 'reg' property where
          the address of the first entry does not match the unit address of the
          node

        default_prop_types (default: True):
          If True, default property types will be used when a node has no
          bindings.

        support_fixed_partitions_on_any_bus (default True):
          If True, set the Node.bus for 'fixed-partitions' compatible nodes
          to None.  This allows 'fixed-partitions' binding to match regardless
          of the bus the 'fixed-partition' is under.

        infer_binding_for_paths (default: None):
          An iterable of devicetree paths identifying nodes for which bindings
          should be inferred from the node content.  (Child nodes are not
          processed.)  Pass none if no nodes should support inferred bindings.

        vendor_prefixes (default: None):
          A dict mapping vendor prefixes in compatible properties to their
          descriptions. If given, compatibles in the form "manufacturer,device"
          for which "manufacturer" is neither a key in the dict nor a specially
          exempt set of grandfathered-in cases will cause warnings.

        werror (default: False):
          If True, some edtlib specific warnings become errors. This currently
          errors out if 'dts' has any deprecated properties set, or an unknown
          vendor prefix is used.
        """
        # All instance attributes should be initialized here.
        # This makes it easy to keep track of them, which makes
        # implementing __deepcopy__() easier.
        # If you change this, make sure to update __deepcopy__() too,
        # and update the tests for that method.

        # Public attributes (the rest are properties)
        self.nodes: List[Node] = []
        self.compat2nodes: Dict[str, List[Node]] = defaultdict(list)
        self.compat2okay: Dict[str, List[Node]] = defaultdict(list)
        self.compat2vendor: Dict[str, str] = defaultdict(str)
        self.compat2model: Dict[str, str]  = defaultdict(str)
        self.label2node: Dict[str, Node] = {}
        self.dep_ord2node: Dict[int, Node] = {}
        self.dts_path: str = dts # type: ignore
        self.bindings_dirs: List[str] = list(bindings_dirs)

        # Saved kwarg values for internal use
        self._warn_reg_unit_address_mismatch: bool = warn_reg_unit_address_mismatch
        self._default_prop_types: bool = default_prop_types
        self._fixed_partitions_no_bus: bool = support_fixed_partitions_on_any_bus
        self._infer_binding_for_paths: Set[str] = set(infer_binding_for_paths or [])
        self._vendor_prefixes: Dict[str, str] = vendor_prefixes or {}
        self._werror: bool = bool(werror)

        # Other internal state
        self._compat2binding: Dict[Tuple[str, Optional[str]], Binding] = {}
        self._graph: Graph = Graph()
        self._binding_paths: List[str] = _binding_paths(self.bindings_dirs)
        self._binding_fname2path: Dict[str, str] = {
            os.path.basename(path): path
            for path in self._binding_paths
        }
        self._node2enode: Dict[dtlib_Node, Node] = {}

        if dts is not None:
            try:
                self._dt = DT(dts)
            except DTError as e:
                raise EDTError(e) from e
            self._finish_init()

    def _finish_init(self) -> None:
        # This helper exists to make the __deepcopy__() implementation
        # easier to keep in sync with __init__().
        _check_dt(self._dt)

        self._init_compat2binding()
        self._init_nodes()
        self._init_graph()
        self._init_luts()

        self._check()

    def get_node(self, path: str) -> Node:
        """
        Returns the Node at the DT path or alias 'path'. Raises EDTError if the
        path or alias doesn't exist.
        """
        try:
            return self._node2enode[self._dt.get_node(path)]
        except DTError as e:
            _err(e)

    @property
    def chosen_nodes(self) -> Dict[str, Node]:
        """See the class docstring."""
        ret: Dict[str, Node] = {}

        try:
            chosen = self._dt.get_node("/chosen")
        except DTError:
            return ret

        for name, prop in chosen.props.items():
            try:
                node = prop.to_path()
            except DTError:
                # DTS value is not phandle or string, or path doesn't exist
                continue

            ret[name] = self._node2enode[node]

        return ret

    def chosen_node(self, name: str) -> Optional[Node]:
        """
        Returns the Node pointed at by the property named 'name' in /chosen, or
        None if the property is missing
        """
        return self.chosen_nodes.get(name)

    @property
    def dts_source(self) -> str:
        """See the class docstring."""
        return f"{self._dt}"

    def __repr__(self) -> str:
        return f"<EDT for '{self.dts_path}', binding directories " \
            f"'{self.bindings_dirs}'>"

    def __deepcopy__(self, memo) -> 'EDT':
        """
        Implements support for the standard library copy.deepcopy()
        function on EDT instances.
        """

        ret = EDT(
            None,
            self.bindings_dirs,
            warn_reg_unit_address_mismatch=self._warn_reg_unit_address_mismatch,
            default_prop_types=self._default_prop_types,
            support_fixed_partitions_on_any_bus=self._fixed_partitions_no_bus,
            infer_binding_for_paths=set(self._infer_binding_for_paths),
            vendor_prefixes=dict(self._vendor_prefixes),
            werror=self._werror
        )
        ret.dts_path = self.dts_path
        ret._dt = deepcopy(self._dt, memo)
        ret._finish_init()
        return ret

    @property
    def scc_order(self) -> List[List[Node]]:
        """See the class docstring."""
        try:
            return self._graph.scc_order()
        except Exception as e:
            # Chain the cause for consistency with the DTError handling
            # in __init__().
            raise EDTError(e) from e

    def _process_properties_r(self, root_node, props_node):
        """
        Process props_node properties for dependencies, and add those as
        dependencies of root_node. Then walk through all the props_node
        children and do the same recursively, maintaining the same root_node.

        This ensures that on a node with child nodes, the parent node includes
        the dependencies of all the child nodes as well as its own.
        """
        # A Node depends on any Nodes present in 'phandle',
        # 'phandles', or 'phandle-array' property values.
        for prop in props_node.props.values():
            if prop.type == 'phandle':
                self._graph.add_edge(root_node, prop.val)
            elif prop.type == 'phandles':
                if TYPE_CHECKING:
                    assert isinstance(prop.val, list)
                for phandle_node in prop.val:
                    self._graph.add_edge(root_node, phandle_node)
            elif prop.type == 'phandle-array':
                if TYPE_CHECKING:
                    assert isinstance(prop.val, list)
                for cd in prop.val:
                    if cd is None:
                        continue
                    if TYPE_CHECKING:
                        assert isinstance(cd, ControllerAndData)
                    self._graph.add_edge(root_node, cd.controller)

        # A Node depends on whatever supports the interrupts it
        # generates.
        for intr in props_node.interrupts:
            self._graph.add_edge(root_node, intr.controller)

        # If the binding defines child bindings, link the child properties to
        # the root_node as well.
        if props_node._binding and props_node._binding.child_binding:
            for child in props_node.children.values():
                if "compatible" in child.props:
                    # Not a child node, normal node on a different binding.
                    continue
                self._process_properties_r(root_node, child)

    def _process_properties(self, node):
        """
        Add node dependencies based on own as well as child node properties,
        start from the node itself.
        """
        self._process_properties_r(node, node)

    def _init_graph(self) -> None:
        # Constructs a graph of dependencies between Node instances,
        # which is usable for computing a partial order over the dependencies.
        # The algorithm supports detecting dependency loops.
        #
        # Actually computing the SCC order is lazily deferred to the
        # first time the scc_order property is read.

        for node in self.nodes:
            # Always insert root node
            if not node.parent:
                self._graph.add_node(node)

            # A Node always depends on its parent.
            for child in node.children.values():
                self._graph.add_edge(child, node)

            self._process_properties(node)

    def _init_compat2binding(self) -> None:
        # Creates self._compat2binding, a dictionary that maps
        # (<compatible>, <bus>) tuples (both strings) to Binding objects.
        #
        # The Binding objects are created from YAML files discovered
        # in self.bindings_dirs as needed.
        #
        # For example, self._compat2binding["company,dev", "can"]
        # contains the Binding for the 'company,dev' device, when it
        # appears on the CAN bus.
        #
        # For bindings that don't specify a bus, <bus> is None, so that e.g.
        # self._compat2binding["company,notonbus", None] is the Binding.
        #
        # Only bindings for 'compatible' strings that appear in the devicetree
        # are loaded.

        dt_compats = _dt_compats(self._dt)
        # Searches for any 'compatible' string mentioned in the devicetree
        # files, with a regex
        dt_compats_search = re.compile(
            "|".join(re.escape(compat) for compat in dt_compats)
        ).search

        for binding_path in self._binding_paths:
            with open(binding_path, encoding="utf-8") as f:
                contents = f.read()

            # As an optimization, skip parsing files that don't contain any of
            # the .dts 'compatible' strings, which should be reasonably safe
            if not dt_compats_search(contents):
                continue

            # Load the binding and check that it actually matches one of the
            # compatibles. Might get false positives above due to comments and
            # stuff.

            try:
                # Parsed PyYAML output (Python lists/dictionaries/strings/etc.,
                # representing the file)
                raw = yaml.load(contents, Loader=_BindingLoader)
            except yaml.YAMLError as e:
                # _err() raises EDTError, so nothing runs after this call;
                # the previously present 'continue' here was unreachable.
                _err(f"'{binding_path}' appears in binding directories "
                     f"but isn't valid YAML: {e}")

            # Convert the raw data to a Binding object, erroring out
            # if necessary.
            binding = self._binding(raw, binding_path, dt_compats)

            # Register the binding in self._compat2binding, along with
            # any child bindings that have their own compatibles.
            while binding is not None:
                if binding.compatible:
                    self._register_binding(binding)
                binding = binding.child_binding

    def _binding(self,
                 raw: Optional[dict],
                 binding_path: str,
                 dt_compats: Set[str]) -> Optional[Binding]:
        # Convert a 'raw' binding from YAML to a Binding object and return it.
        #
        # Error out if the raw data looks like an invalid binding.
        #
        # Return None if the file doesn't contain a binding or the
        # binding's compatible isn't in dt_compats.

        # Get the 'compatible:' string.
        if raw is None or "compatible" not in raw:
            # Empty file, binding fragment, spurious file, etc.
            return None

        compatible = raw["compatible"]

        if compatible not in dt_compats:
            # Not a compatible we care about.
            return None

        # Initialize and return the Binding object.
        return Binding(binding_path, self._binding_fname2path, raw=raw)

    def _register_binding(self, binding: Binding) -> None:
        # Do not allow two different bindings to have the same
        # 'compatible:'/'on-bus:' combo
        if TYPE_CHECKING:
            assert binding.compatible
        old_binding = self._compat2binding.get((binding.compatible,
                                                binding.on_bus))
        if old_binding:
            msg = (f"both {old_binding.path} and {binding.path} have "
                   f"'compatible: {binding.compatible}'")
            if binding.on_bus is not None:
                msg += f" and 'on-bus: {binding.on_bus}'"
            _err(msg)

        # Register the binding.
        self._compat2binding[binding.compatible, binding.on_bus] = binding

    def _init_nodes(self) -> None:
        # Creates a list of edtlib.Node objects from the dtlib.Node objects, in
        # self.nodes

        for dt_node in self._dt.node_iter():
            # Warning: We depend on parent Nodes being created before their
            # children. This is guaranteed by node_iter().
            if "compatible" in dt_node.props:
                compats = dt_node.props["compatible"].to_strings()
            else:
                compats = []
            node = Node(dt_node, self, compats)
            node.bus_node = node._bus_node(self._fixed_partitions_no_bus)
            node._init_binding()
            node._init_regs()
            node._init_ranges()

            self.nodes.append(node)
            self._node2enode[dt_node] = node

        for node in self.nodes:
            # These depend on all Node objects having been created, because
            # they (either always or sometimes) reference other nodes, so we
            # run them separately
            node._init_props(default_prop_types=self._default_prop_types,
                             err_on_deprecated=self._werror)
            node._init_interrupts()
            node._init_pinctrls()

        if self._warn_reg_unit_address_mismatch:
            # This warning matches the simple_bus_reg warning in dtc
            for node in self.nodes:
                # Address mismatch is ok for PCI devices
                if (node.regs and node.regs[0].addr != node.unit_addr and
                        not node.is_pci_device):
                    _LOG.warning("unit address and first address in 'reg' "
                                 f"(0x{node.regs[0].addr:x}) don't match for "
                                 f"{node.path}")

    def _init_luts(self) -> None:
        # Initialize node lookup tables (LUTs).

        for node in self.nodes:
            for label in node.labels:
                self.label2node[label] = node

            for compat in node.compats:
                self.compat2nodes[compat].append(node)

                if node.status == "okay":
                    self.compat2okay[compat].append(node)

                if compat in self.compat2vendor:
                    # Already validated and classified on an earlier node.
                    continue

                # The regular expression comes from dt-schema.
                compat_re = r'^[a-zA-Z][a-zA-Z0-9,+\-._]+$'
                if not re.match(compat_re, compat):
                    _err(f"node '{node.path}' compatible '{compat}' "
                         'must match this regular expression: '
                         f"'{compat_re}'")

                if ',' in compat and self._vendor_prefixes:
                    vendor, model = compat.split(',', 1)
                    if vendor in self._vendor_prefixes:
                        self.compat2vendor[compat] = self._vendor_prefixes[vendor]
                        self.compat2model[compat] = model

                    # As an exception, the root node can have whatever
                    # compatibles it wants. Other nodes get checked.
                    elif node.path != '/':
                        if self._werror:
                            handler_fn: Any = _err
                        else:
                            handler_fn = _LOG.warning
                        handler_fn(
                            f"node '{node.path}' compatible '{compat}' "
                            f"has unknown vendor prefix '{vendor}'")

        for nodeset in self.scc_order:
            node = nodeset[0]
            self.dep_ord2node[node.dep_ordinal] = node

    def _check(self) -> None:
        # Tree-wide checks and warnings.

        for binding in self._compat2binding.values():
            for spec in binding.prop2specs.values():
                if not spec.enum or spec.type != 'string':
                    continue

                if not spec.enum_tokenizable:
                    _LOG.warning(
                        f"compatible '{binding.compatible}' "
                        f"in binding '{binding.path}' has non-tokenizable enum "
                        f"for property '{spec.name}': " +
                        ', '.join(repr(x) for x in spec.enum))
                elif not spec.enum_upper_tokenizable:
                    _LOG.warning(
                        f"compatible '{binding.compatible}' "
                        f"in binding '{binding.path}' has enum for property "
                        f"'{spec.name}' that is only tokenizable "
                        'in lowercase: ' +
                        ', '.join(repr(x) for x in spec.enum))

        # Validate the contents of compatible properties.
        for node in self.nodes:
            if 'compatible' not in node.props:
                continue

            compatibles = node.props['compatible'].val

            # _check() runs after _init_compat2binding() has called
            # _dt_compats(), which already converted every compatible
            # property to a list of strings. So we know 'compatibles'
            # is a list, but add an assert for future-proofing.
            assert isinstance(compatibles, list)

            for compat in compatibles:
                # This is also just for future-proofing.
                assert isinstance(compat, str)
2346
2347
def bindings_from_paths(yaml_paths: List[str],
                        ignore_errors: bool = False) -> List[Binding]:
    """
    Get a list of Binding objects from the yaml files 'yaml_paths'.

    If 'ignore_errors' is True, YAML files that cause an EDTError when
    loaded are ignored. (No other exception types are silenced.)
    """

    # Binding needs a filename -> path map to resolve includes.
    fname2path = {os.path.basename(path): path for path in yaml_paths}

    bindings = []
    for path in yaml_paths:
        try:
            bindings.append(Binding(path, fname2path))
        except EDTError:
            if not ignore_errors:
                raise

    return bindings
2368
2369
class EDTError(Exception):
    """Exception raised for devicetree- and binding-related errors"""
2372
2373#
2374# Public global functions
2375#
2376
2377
def load_vendor_prefixes_txt(vendor_prefixes: str) -> Dict[str, str]:
    """Load a vendor-prefixes.txt file and return a dict
    representation mapping a vendor prefix to the vendor name.
    """
    vnd2vendor: Dict[str, str] = {}
    with open(vendor_prefixes, 'r', encoding='utf-8') as f:
        for raw_line in f:
            stripped = raw_line.strip()

            if not stripped or stripped[0] == '#':
                # Skip comments and empty lines.
                continue

            # Every other line must look like:
            #
            # <vnd><TAB><vendor>
            fields = stripped.split('\t', 1)
            assert len(fields) == 2, stripped
            vnd, vendor = fields
            vnd2vendor[vnd] = vendor
    return vnd2vendor
2398
2399#
2400# Private global functions
2401#
2402
2403
def _dt_compats(dt: DT) -> Set[str]:
    # Returns a set() with all 'compatible' strings in the devicetree
    # represented by dt (a dtlib.DT instance)

    compats: Set[str] = set()
    for node in dt.node_iter():
        if "compatible" in node.props:
            compats.update(node.props["compatible"].to_strings())
    return compats
2412
2413
2414def _binding_paths(bindings_dirs: List[str]) -> List[str]:
2415    # Returns a list with the paths to all bindings (.yaml files) in
2416    # 'bindings_dirs'
2417
2418    binding_paths = []
2419
2420    for bindings_dir in bindings_dirs:
2421        for root, _, filenames in os.walk(bindings_dir):
2422            for filename in filenames:
2423                if filename.endswith(".yaml") or filename.endswith(".yml"):
2424                    binding_paths.append(os.path.join(root, filename))
2425
2426    return binding_paths
2427
2428
def _binding_inc_error(msg):
    # Helper for reporting errors in the !include implementation.
    #
    # Raises yaml.constructor.ConstructorError, the exception type PyYAML
    # expects from a failing constructor.

    raise yaml.constructor.ConstructorError(None, None, "error: " + msg)
2433
2434
def _check_include_dict(name: Optional[str],
                        allowlist: Optional[List[str]],
                        blocklist: Optional[List[str]],
                        child_filter: Optional[dict],
                        binding_path: Optional[str]) -> None:
    # Validates the structure of an 'include:' element named 'name', with
    # property-allowlist 'allowlist', property-blocklist 'blocklist', and
    # child-binding filter 'child_filter'. Errors out via _err() on any
    # structural problem.

    if name is None:
        _err(f"'include:' element in {binding_path} "
             "should have a 'name' key")

    if allowlist is not None and blocklist is not None:
        _err(f"'include:' of file '{name}' in {binding_path} "
             "should not specify both 'property-allowlist:' "
             "and 'property-blocklist:'")

    # Walk the chain of nested 'child-binding:' filters, validating each
    # level in turn.
    level = child_filter
    while level is not None:
        remaining = deepcopy(level)
        allow: Optional[List[str]] = remaining.pop('property-allowlist', None)
        block: Optional[List[str]] = remaining.pop('property-blocklist', None)
        nested: Optional[dict] = remaining.pop('child-binding', None)

        if remaining:
            # We've popped out all the valid keys; anything left over is
            # unexpected.
            _err(f"'include:' of file '{name}' in {binding_path} "
                 "should not have these unexpected contents in a "
                 f"'child-binding': {remaining}")

        if allow is not None and block is not None:
            _err(f"'include:' of file '{name}' in {binding_path} "
                 "should not specify both 'property-allowlist:' and "
                 "'property-blocklist:' in a 'child-binding:'")

        level = nested
2474
2475
def _filter_properties(raw: dict,
                       allowlist: Optional[List[str]],
                       blocklist: Optional[List[str]],
                       child_filter: Optional[dict],
                       binding_path: Optional[str]) -> None:
    # Destructively modifies 'raw["properties"]' and
    # 'raw["child-binding"]', if they exist, according to
    # 'allowlist', 'blocklist', and 'child_filter'.

    # Filter the top-level 'properties:' first.
    _filter_properties_helper(raw.get('properties'), allowlist, blocklist,
                              binding_path)

    # Then walk the parallel chains of 'child-binding' filters and
    # 'child-binding' dicts, filtering each level that has both.
    child_binding = raw.get('child-binding')
    while child_filter is not None and child_binding is not None:
        _filter_properties_helper(child_binding.get('properties'),
                                  child_filter.get('property-allowlist'),
                                  child_filter.get('property-blocklist'),
                                  binding_path)
        child_filter = child_filter.get('child-binding')
        child_binding = child_binding.get('child-binding')
2496
2497
def _filter_properties_helper(props: Optional[dict],
                              allowlist: Optional[List[str]],
                              blocklist: Optional[List[str]],
                              binding_path: Optional[str]) -> None:
    # _filter_properties() helper: applies 'allowlist' or 'blocklist' to
    # the 'properties:' dict 'props' in place. No-op when there are no
    # properties or no filters.

    if props is None:
        return
    if allowlist is None and blocklist is None:
        return

    _check_prop_filter('property-allowlist', allowlist, binding_path)
    _check_prop_filter('property-blocklist', blocklist, binding_path)

    if allowlist is not None:
        keep = set(allowlist)
        doomed = [name for name in props if name not in keep]
    else:
        if TYPE_CHECKING:
            assert blocklist
        drop = set(blocklist)
        doomed = [name for name in props if name in drop]

    for name in doomed:
        del props[name]
2519
2520
def _check_prop_filter(name: str, value: Optional[List[str]],
                       binding_path: Optional[str]) -> None:
    # Ensures that the value of an 'include: ... property-allowlist' or
    # 'property-blocklist' is a list (or absent).

    if value is not None and not isinstance(value, list):
        _err(f"'{name}' value {value} in {binding_path} should be a list")
2531
2532
def _merge_props(to_dict: dict,
                 from_dict: dict,
                 parent: Optional[str],
                 binding_path: Optional[str],
                 check_required: bool = False):
    # Recursively merges 'from_dict' into 'to_dict', to implement 'include:'.
    #
    # If 'from_dict' and 'to_dict' contain a 'required:' key for the same
    # property, then the values are ORed together.
    #
    # If 'check_required' is True, then an error is raised if 'from_dict' has
    # 'required: true' while 'to_dict' has 'required: false'. This prevents
    # bindings from "downgrading" requirements from bindings they include,
    # which might help keep bindings well-organized.
    #
    # It's an error for most other keys to appear in both 'from_dict' and
    # 'to_dict'. When it's not an error, the value in 'to_dict' takes
    # precedence.
    #
    # 'parent' is the name of the parent key containing 'to_dict' and
    # 'from_dict', and 'binding_path' is the path to the top-level binding.
    # These are used to generate errors for sketchy property overwrites.

    for prop in from_dict:
        if isinstance(to_dict.get(prop), dict) and \
           isinstance(from_dict[prop], dict):
            # Both sides have a dict under this key: merge recursively
            _merge_props(to_dict[prop], from_dict[prop], prop, binding_path,
                         check_required)
        elif prop not in to_dict:
            # Key only exists in 'from_dict': copy it over
            to_dict[prop] = from_dict[prop]
        elif _bad_overwrite(to_dict, from_dict, prop, check_required):
            _err(f"{binding_path} (in '{parent}'): '{prop}' "
                 f"from included file overwritten ('{from_dict[prop]}' "
                 f"replaced with '{to_dict[prop]}')")
        elif prop == "required":
            # Need a separate check here, because this code runs before
            # Binding._check()
            if not (isinstance(from_dict["required"], bool) and
                    isinstance(to_dict["required"], bool)):
                _err(f"malformed 'required:' setting for '{parent}' in "
                     f"'properties' in {binding_path}, expected true/false")

            # 'required: true' takes precedence
            to_dict["required"] = to_dict["required"] or from_dict["required"]
2577
2578
2579def _bad_overwrite(to_dict: dict, from_dict: dict, prop: str,
2580                   check_required: bool) -> bool:
2581    # _merge_props() helper. Returns True in cases where it's bad that
2582    # to_dict[prop] takes precedence over from_dict[prop].
2583
2584    if to_dict[prop] == from_dict[prop]:
2585        return False
2586
2587    # These are overridden deliberately
2588    if prop in {"title", "description", "compatible"}:
2589        return False
2590
2591    if prop == "required":
2592        if not check_required:
2593            return False
2594        return from_dict[prop] and not to_dict[prop]
2595
2596    return True
2597
2598
def _binding_include(loader, node):
    # Implements !include, for backwards compatibility. '!include [foo, bar]'
    # just becomes [foo, bar].

    if isinstance(node, yaml.ScalarNode):
        # Single file: '!include foo.yaml'
        return [loader.construct_scalar(node)]
    if isinstance(node, yaml.SequenceNode):
        # Multiple files: '!include [foo.yaml, bar.yaml]'
        return loader.construct_sequence(node)

    _binding_inc_error("unrecognised node type in !include statement")
2612
2613
2614def _check_prop_by_type(prop_name: str,
2615                        options: dict,
2616                        binding_path: Optional[str]) -> None:
2617    # Binding._check_properties() helper. Checks 'type:', 'default:',
2618    # 'const:' and # 'specifier-space:' for the property named 'prop_name'
2619
2620    prop_type = options.get("type")
2621    default = options.get("default")
2622    const = options.get("const")
2623
2624    if prop_type is None:
2625        _err(f"missing 'type:' for '{prop_name}' in 'properties' in "
2626             f"{binding_path}")
2627
2628    ok_types = {"boolean", "int", "array", "uint8-array", "string",
2629                "string-array", "phandle", "phandles", "phandle-array",
2630                "path", "compound"}
2631
2632    if prop_type not in ok_types:
2633        _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2634             f"has unknown type '{prop_type}', expected one of " +
2635             ", ".join(ok_types))
2636
2637    if "specifier-space" in options and prop_type != "phandle-array":
2638        _err(f"'specifier-space' in 'properties: {prop_name}' "
2639             f"has type '{prop_type}', expected 'phandle-array'")
2640
2641    if prop_type == "phandle-array":
2642        if not prop_name.endswith("s") and not "specifier-space" in options:
2643            _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2644                 f"has type 'phandle-array' and its name does not end in 's', "
2645                 f"but no 'specifier-space' was provided.")
2646
2647    # If you change const_types, be sure to update the type annotation
2648    # for PropertySpec.const.
2649    const_types = {"int", "array", "uint8-array", "string", "string-array"}
2650    if const and prop_type not in const_types:
2651        _err(f"const in {binding_path} for property '{prop_name}' "
2652             f"has type '{prop_type}', expected one of " +
2653             ", ".join(const_types))
2654
2655    # Check default
2656
2657    if default is None:
2658        return
2659
2660    if prop_type in {"boolean", "compound", "phandle", "phandles",
2661                     "phandle-array", "path"}:
2662        _err("'default:' can't be combined with "
2663             f"'type: {prop_type}' for '{prop_name}' in "
2664             f"'properties:' in {binding_path}")
2665
2666    def ok_default() -> bool:
2667        # Returns True if 'default' is an okay default for the property's type.
2668        # If you change this, be sure to update the type annotation for
2669        # PropertySpec.default.
2670
2671        if prop_type == "int" and isinstance(default, int) or \
2672           prop_type == "string" and isinstance(default, str):
2673            return True
2674
2675        # array, uint8-array, or string-array
2676
2677        if not isinstance(default, list):
2678            return False
2679
2680        if prop_type == "array" and \
2681           all(isinstance(val, int) for val in default):
2682            return True
2683
2684        if prop_type == "uint8-array" and \
2685           all(isinstance(val, int) and 0 <= val <= 255 for val in default):
2686            return True
2687
2688        # string-array
2689        return all(isinstance(val, str) for val in default)
2690
2691    if not ok_default():
2692        _err(f"'default: {default}' is invalid for '{prop_name}' "
2693             f"in 'properties:' in {binding_path}, "
2694             f"which has type {prop_type}")
2695
2696
def _translate(addr: int, node: dtlib_Node) -> int:
    # Recursively translates 'addr' on 'node' to the address space(s) of its
    # parent(s), by looking at 'ranges' properties. Returns the translated
    # address.
    #
    # addr:
    #   Address within 'node's address space
    #
    # node:
    #   The node 'addr' belongs to

    if not node.parent or "ranges" not in node.parent.props:
        # No translation
        return addr

    if not node.parent.props["ranges"].value:
        # DT spec.: "If the property is defined with an <empty> value, it
        # specifies that the parent and child address space is identical, and
        # no address translation is required."
        #
        # Treat this the same as a 'range' that explicitly does a one-to-one
        # mapping, as opposed to there not being any translation.
        return _translate(addr, node.parent)

    # Gives the size of each component in a translation 3-tuple in 'ranges'
    child_address_cells = _address_cells(node)
    parent_address_cells = _address_cells(node.parent)
    child_size_cells = _size_cells(node)

    # Number of cells for one translation 3-tuple in 'ranges'
    entry_cells = child_address_cells + parent_address_cells + child_size_cells

    for raw_range in _slice(node.parent, "ranges", 4*entry_cells,
                            f"4*(<#address-cells> (= {child_address_cells}) + "
                            "<#address-cells for parent> "
                            f"(= {parent_address_cells}) + "
                            f"<#size-cells> (= {child_size_cells}))"):
        # Each entry is <child addr> <parent addr> <length>, in that order
        child_addr = to_num(raw_range[:4*child_address_cells])
        raw_range = raw_range[4*child_address_cells:]

        parent_addr = to_num(raw_range[:4*parent_address_cells])
        raw_range = raw_range[4*parent_address_cells:]

        child_len = to_num(raw_range)

        if child_addr <= addr < child_addr + child_len:
            # 'addr' is within range of a translation in 'ranges'. Recursively
            # translate it and return the result.
            return _translate(parent_addr + addr - child_addr, node.parent)

    # 'addr' is not within range of any translation in 'ranges'
    return addr
2743
2744
def _add_names(node: dtlib_Node, names_ident: str, objs: Any) -> None:
    # Helper for registering names from <foo>-names properties.
    #
    # node:
    #   Node which has a property that might need named elements.
    #
    # names_ident:
    #   The <foo> part of <foo>-names, e.g. "reg" for "reg-names"
    #
    # objs:
    #   list of objects whose .name field should be set

    prop_name = names_ident + "-names"

    if prop_name not in node.props:
        # No <foo>-names property: clear the names instead
        for obj in objs:
            if obj is not None:
                obj.name = None
        return

    names = node.props[prop_name].to_strings()
    if len(names) != len(objs):
        _err(f"{prop_name} property in {node.path} "
             f"in {node.dt.filename} has {len(names)} strings, "
             f"expected {len(objs)} strings")

    for obj, name in zip(objs, names):
        # Skip empty placeholder entries
        if obj is None:
            continue
        obj.name = name
2774
2775
def _interrupt_parent(start_node: dtlib_Node) -> dtlib_Node:
    # Returns the node pointed at by the closest 'interrupt-parent', searching
    # the parents of 'node'. As of writing, this behavior isn't specified in
    # the DT spec., but seems to match what some .dts files expect.

    # Walk from 'start_node' up towards the root, taking the first
    # 'interrupt-parent' found
    current: Optional[dtlib_Node] = start_node
    while current:
        if "interrupt-parent" in current.props:
            return current.props["interrupt-parent"].to_node()
        current = current.parent

    _err(f"{start_node!r} has an 'interrupts' property, but neither the node "
         f"nor any of its parents has an 'interrupt-parent' property")
2790
2791
def _interrupts(node: dtlib_Node) -> List[Tuple[dtlib_Node, bytes]]:
    # Returns a list of (<controller>, <data>) tuples, with one tuple per
    # interrupt generated by 'node'. <controller> is the destination of the
    # interrupt (possibly after mapping through an 'interrupt-map'), and <data>
    # the data associated with the interrupt (as a 'bytes' object).

    if "interrupts-extended" in node.props:
        # 'interrupts-extended' names an explicit parent per entry, and
        # takes precedence over 'interrupts' if both are present
        prop = node.props["interrupts-extended"]

        mapped: List[Tuple[dtlib_Node, bytes]] = []
        for entry in _phandle_val_list(prop, "interrupt"):
            if entry is None:
                _err(f"node '{node.path}' interrupts-extended property "
                     "has an empty element")
            iparent, spec = entry
            mapped.append(_map_interrupt(node, iparent, spec))
        return mapped

    if "interrupts" in node.props:
        # Treat 'interrupts' as a special case of 'interrupts-extended', with
        # the same interrupt parent for all interrupts

        iparent = _interrupt_parent(node)
        cells = _interrupt_cells(iparent)

        return [_map_interrupt(node, iparent, raw)
                for raw in _slice(node, "interrupts", 4*cells,
                                  "4*<#interrupt-cells>")]

    # Node generates no interrupts
    return []
2823
2824
def _map_interrupt(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> Tuple[dtlib_Node, bytes]:
    # Translates an interrupt headed from 'child' to 'parent' with data
    # 'child_spec' through any 'interrupt-map' properties. Returns a
    # (<controller>, <data>) tuple with the final destination after mapping.

    if "interrupt-controller" in parent.props:
        # 'parent' is itself the interrupt controller: no mapping needed
        return (parent, child_spec)

    def own_address_cells(node):
        # Used for parents pointed at by 'interrupt-map'. We can't use
        # _address_cells(), because it's the #address-cells property on 'node'
        # itself that matters.

        address_cells = node.props.get("#address-cells")
        if not address_cells:
            _err(f"missing #address-cells on {node!r} "
                 "(while handling interrupt-map)")
        return address_cells.to_num()

    def spec_len_fn(node):
        # Can't use _address_cells() here, because it's the #address-cells
        # property on 'node' itself that matters
        return own_address_cells(node) + _interrupt_cells(node)

    # The child specifier in 'interrupt-map' entries is prefixed with the
    # child's unit address, so prepend it before mapping
    parent, raw_spec = _map(
        "interrupt", child, parent, _raw_unit_addr(child) + child_spec,
        spec_len_fn, require_controller=True)

    # Strip the parent unit address part, if any
    return (parent, raw_spec[4*own_address_cells(parent):])
2859
2860
def _map_phandle_array_entry(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        basename: str
) -> Tuple[dtlib_Node, bytes]:
    # Returns a (<controller>, <data>) tuple with the final destination after
    # mapping through any '<basename>-map' (e.g. gpio-map) properties. See
    # _map_interrupt().

    def spec_len_fn(node):
        # The parent specifier length comes from the #<basename>-cells
        # property on the mapped-to node
        cells_name = f"#{basename}-cells"
        cells_prop = node.props.get(cells_name)
        if cells_prop is None:
            _err(f"expected '{cells_name}' property on {node!r} "
                 f"(referenced by {child!r})")
        return cells_prop.to_num()

    # Do not require <prefix>-controller for anything but interrupts for now
    return _map(basename, child, parent, child_spec, spec_len_fn,
                require_controller=False)
2881
2882
def _map(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        spec_len_fn: Callable[[dtlib_Node], int],
        require_controller: bool
) -> Tuple[dtlib_Node, bytes]:
    # Common code for mapping through <prefix>-map properties, e.g.
    # interrupt-map and gpio-map.
    #
    # prefix:
    #   The prefix, e.g. "interrupt" or "gpio"
    #
    # child:
    #   The "sender", e.g. the node with 'interrupts = <...>'
    #
    # parent:
    #   The "receiver", e.g. a node with 'interrupt-map = <...>' or
    #   'interrupt-controller' (no mapping)
    #
    # child_spec:
    #   The data associated with the interrupt/GPIO/etc., as a 'bytes' object,
    #   e.g. <1 2> for 'foo-gpios = <&gpio1 1 2>'.
    #
    # spec_len_fn:
    #   Function called on a parent specified in a *-map property to get the
    #   length of the parent specifier (data after phandle in *-map), in cells
    #
    # require_controller:
    #   If True, the final controller node after mapping is required to have
    #   a <prefix>-controller property.

    map_prop = parent.props.get(prefix + "-map")
    if not map_prop:
        if require_controller and prefix + "-controller" not in parent.props:
            _err(f"expected '{prefix}-controller' property on {parent!r} "
                 f"(referenced by {child!r})")

        # No mapping
        return (parent, child_spec)

    # A <prefix>-map-mask property (if any) masks the child data before it
    # is compared against map entries
    masked_child_spec = _mask(prefix, child, parent, child_spec)

    # Walk the raw map property value. Each row has the layout
    # <child data> <parent phandle> <parent data>, where the parent data
    # length depends on the node the phandle points at.
    raw = map_prop.value
    while raw:
        if len(raw) < len(child_spec):
            _err(f"bad value for {map_prop!r}, missing/truncated child data")
        child_spec_entry = raw[:len(child_spec)]
        raw = raw[len(child_spec):]

        if len(raw) < 4:
            _err(f"bad value for {map_prop!r}, missing/truncated phandle")
        phandle = to_num(raw[:4])
        raw = raw[4:]

        # Parent specified in *-map
        map_parent = parent.dt.phandle2node.get(phandle)
        if not map_parent:
            _err(f"bad phandle ({phandle}) in {map_prop!r}")

        map_parent_spec_len = 4*spec_len_fn(map_parent)
        if len(raw) < map_parent_spec_len:
            _err(f"bad value for {map_prop!r}, missing/truncated parent data")
        parent_spec = raw[:map_parent_spec_len]
        raw = raw[map_parent_spec_len:]

        # Got one *-map row. Check if it matches the child data.
        if child_spec_entry == masked_child_spec:
            # Handle *-map-pass-thru
            parent_spec = _pass_thru(
                prefix, child, parent, child_spec, parent_spec)

            # Found match. Recursively map and return it.
            return _map(prefix, parent, map_parent, parent_spec, spec_len_fn,
                        require_controller)

    _err(f"child specifier for {child!r} ({child_spec!r}) "
         f"does not appear in {map_prop!r}")
2962
2963
def _mask(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-mask properties, e.g.
    # interrupt-map-mask. See _map() for the parameters.

    mask_prop = parent.props.get(prefix + "-map-mask")
    if not mask_prop:
        # No mask
        return child_spec

    mask = mask_prop.value
    if len(mask) != len(child_spec):
        # Name the property that was actually looked up
        # ('<prefix>-map-mask', not '<prefix>-mask') in the error message
        _err(f"{child!r}: expected '{prefix}-map-mask' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(mask)} bytes")

    return _and(child_spec, mask)
2984
2985
def _pass_thru(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        parent_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-pass-thru properties, e.g.
    # interrupt-map-pass-thru.
    #
    # parent_spec:
    #   The parent data from the matched entry in the <prefix>-map property
    #
    # See _map() for the other parameters.

    pass_thru_prop = parent.props.get(prefix + "-map-pass-thru")
    if not pass_thru_prop:
        # No pass-thru
        return parent_spec

    pass_thru = pass_thru_prop.value
    if len(pass_thru) != len(child_spec):
        _err(f"{child!r}: expected '{prefix}-map-pass-thru' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(pass_thru)} bytes")

    # Bits set in 'pass_thru' are taken from the child specifier; the rest
    # come from the matched map entry's parent specifier
    res = _or(_and(child_spec, pass_thru),
              _and(parent_spec, _not(pass_thru)))

    # Truncate to length of parent spec.
    return res[-len(parent_spec):]
3016
3017
def _raw_unit_addr(node: dtlib_Node) -> bytes:
    # _map_interrupt() helper. Returns the unit address (derived from 'reg'
    # and #address-cells) as a raw 'bytes'

    reg_prop = node.props.get('reg')
    if reg_prop is None:
        _err(f"{node!r} lacks 'reg' property "
             "(needed for 'interrupt-map' unit address lookup)")

    # The address is the first #address-cells cells of 'reg'
    addr_len = 4*_address_cells(node)

    if len(reg_prop.value) < addr_len:
        _err(f"{node!r} has too short 'reg' property "
             "(while doing 'interrupt-map' unit address lookup)")

    return reg_prop.value[:addr_len]
3033
3034
3035def _and(b1: bytes, b2: bytes) -> bytes:
3036    # Returns the bitwise AND of the two 'bytes' objects b1 and b2. Pads
3037    # with ones on the left if the lengths are not equal.
3038
3039    # Pad on the left, to equal length
3040    maxlen = max(len(b1), len(b2))
3041    return bytes(x & y for x, y in zip(b1.rjust(maxlen, b'\xff'),
3042                                       b2.rjust(maxlen, b'\xff')))
3043
3044
3045def _or(b1: bytes, b2: bytes) -> bytes:
3046    # Returns the bitwise OR of the two 'bytes' objects b1 and b2. Pads with
3047    # zeros on the left if the lengths are not equal.
3048
3049    # Pad on the left, to equal length
3050    maxlen = max(len(b1), len(b2))
3051    return bytes(x | y for x, y in zip(b1.rjust(maxlen, b'\x00'),
3052                                       b2.rjust(maxlen, b'\x00')))
3053
3054
3055def _not(b: bytes) -> bytes:
3056    # Returns the bitwise not of the 'bytes' object 'b'
3057
3058    # ANDing with 0xFF avoids negative numbers
3059    return bytes(~x & 0xFF for x in b)
3060
3061
def _phandle_val_list(
        prop: dtlib_Property,
        n_cells_name: str
) -> List[Optional[Tuple[dtlib_Node, bytes]]]:
    # Parses a '<phandle> <value> <phandle> <value> ...' value. The number of
    # cells that make up each <value> is derived from the node pointed at by
    # the preceding <phandle>.
    #
    # prop:
    #   dtlib.Property with value to parse
    #
    # n_cells_name:
    #   The <name> part of the #<name>-cells property to look for on the nodes
    #   the phandles point to, e.g. "gpio" for #gpio-cells.
    #
    # Each tuple in the return value is a (<node>, <value>) pair, where <node>
    # is the node pointed at by <phandle>. If <phandle> does not refer
    # to a node, the entire list element is None.

    full_n_cells_name = f"#{n_cells_name}-cells"

    res: List[Optional[Tuple[dtlib_Node, bytes]]] = []

    raw = prop.value
    while raw:
        # Each entry starts with a 4-byte (one cell) phandle
        if len(raw) < 4:
            # Not enough room for phandle
            _err("bad value for " + repr(prop))
        phandle = to_num(raw[:4])
        raw = raw[4:]

        node = prop.node.dt.phandle2node.get(phandle)
        if not node:
            # Unspecified phandle-array element. This is valid; a 0
            # phandle value followed by no cells is an empty element.
            res.append(None)
            continue

        if full_n_cells_name not in node.props:
            _err(f"{node!r} lacks {full_n_cells_name}")

        # The pointed-to node determines how many value cells follow
        n_cells = node.props[full_n_cells_name].to_num()
        if len(raw) < 4*n_cells:
            _err("missing data after phandle in " + repr(prop))

        res.append((node, raw[:4*n_cells]))
        raw = raw[4*n_cells:]

    return res
3111
3112
def _address_cells(node: dtlib_Node) -> int:
    # Returns the #address-cells setting for 'node', giving the number of
    # <u32> cells used to encode the address in the 'reg' property.
    # The setting lives on the node's parent.
    if TYPE_CHECKING:
        assert node.parent

    cells_prop = node.parent.props.get("#address-cells")
    if cells_prop is not None:
        return cells_prop.to_num()
    return 2  # Default value per DT spec.
3122
3123
def _size_cells(node: dtlib_Node) -> int:
    # Returns the #size-cells setting for 'node', giving the number of
    # <u32> cells used to encode the size in the 'reg' property.
    # The setting lives on the node's parent.
    if TYPE_CHECKING:
        assert node.parent

    cells_prop = node.parent.props.get("#size-cells")
    if cells_prop is not None:
        return cells_prop.to_num()
    return 1  # Default value per DT spec.
3133
3134
def _interrupt_cells(node: dtlib_Node) -> int:
    # Returns the #interrupt-cells property value on 'node', erroring out if
    # 'node' has no #interrupt-cells property

    cells_prop = node.props.get("#interrupt-cells")
    if cells_prop is None:
        _err(f"{node!r} lacks #interrupt-cells")
    return cells_prop.to_num()
3142
3143
def _slice(node: dtlib_Node,
           prop_name: str,
           size: int,
           size_hint: str) -> List[bytes]:
    # Splits the value of the property 'prop_name' on 'node' into 'size'-byte
    # chunks via dtlib's _slice_helper(), reporting any problem as an
    # EDTError. 'size_hint' is a human-readable description of 'size' used
    # in error messages.
    return _slice_helper(node, prop_name, size, size_hint, EDTError)
3149
3150
def _check_dt(dt: DT) -> None:
    # Does devicetree sanity checks. dtlib is meant to be general and
    # anything-goes except for very special properties like phandle, but in
    # edtlib we can be pickier.

    # 'status' values given in the devicetree spec., plus "ok", which is
    # accepted for backwards compatibility
    ok_status = {"ok", "okay", "disabled", "reserved", "fail", "fail-sss"}

    for node in dt.node_iter():
        # Check that 'status' has one of the values given in the spec.
        if "status" in node.props:
            try:
                status_val = node.props["status"].to_string()
            except DTError as e:
                # The error message gives the path
                _err(str(e))

            if status_val not in ok_status:
                _err(f"unknown 'status' value \"{status_val}\" in {node.path} "
                     f"in {node.dt.filename}, expected one of " +
                     ", ".join(ok_status) +
                     " (see the devicetree specification)")

        # 'ranges' must be either empty or a sequence of cells
        ranges_prop = node.props.get("ranges")
        if ranges_prop and ranges_prop.type not in (Type.EMPTY, Type.NUMS):
            _err(f"expected 'ranges = < ... >;' in {node.path} in "
                 f"{node.dt.filename}, not '{ranges_prop}' "
                 "(see the devicetree specification)")
3181
3182
def _err(msg) -> NoReturn:
    # Raises EDTError with message 'msg'. Small helper that keeps
    # error raising terse at call sites and lets callers be typed
    # as never returning.
    raise EDTError(msg)
3185
# Module-wide logging object, named after this module so that clients
# can configure it through the standard logging hierarchy
_LOG: logging.Logger = logging.getLogger(__name__)
3188
# Matches a single character that is not an ASCII letter, digit, or
# underscore (re.ASCII makes \W reject non-ASCII "word" characters too).
_NOT_ALPHANUM_OR_UNDERSCORE = re.compile(r'\W', re.ASCII)


def str_as_token(val: str) -> str:
    """Return a canonical representation of a string as a C token.

    Every character of 'val' that is not alphanumeric or an underscore
    is replaced with an underscore."""

    return _NOT_ALPHANUM_OR_UNDERSCORE.sub('_', val)
3200
3201
# Custom PyYAML binding loader class. Subclassing lets us register the
# '!include' constructor below on this class only, instead of modifying
# yaml.Loader directly, which could interfere with YAML loading in
# clients of this library.
class _BindingLoader(Loader):
    pass
3206
3207
# Add legacy '!include foo.yaml' handling to the binding loader.
# _binding_include (defined elsewhere in this file) implements the tag.
_BindingLoader.add_constructor("!include", _binding_include)
3210
3211#
3212# "Default" binding for properties which are defined by the spec.
3213#
3214# Zephyr: do not change the _DEFAULT_PROP_TYPES keys without
3215# updating the documentation for the DT_PROP() macro in
3216# include/devicetree.h.
3217#
3218
3219_DEFAULT_PROP_TYPES: Dict[str, str] = {
3220    "compatible": "string-array",
3221    "status": "string",
3222    "ranges": "compound",  # NUMS or EMPTY
3223    "reg": "array",
3224    "reg-names": "string-array",
3225    "label": "string",
3226    "interrupts": "array",
3227    "interrupts-extended": "compound",
3228    "interrupt-names": "string-array",
3229    "interrupt-controller": "boolean",
3230}
3231
3232_STATUS_ENUM: List[str] = "ok okay disabled reserved fail fail-sss".split()
3233
3234def _raw_default_property_for(
3235        name: str
3236) -> Dict[str, Union[str, bool, List[str]]]:
3237    ret: Dict[str, Union[str, bool, List[str]]] = {
3238        'type': _DEFAULT_PROP_TYPES[name],
3239        'required': False,
3240    }
3241    if name == 'status':
3242        ret['enum'] = _STATUS_ENUM
3243    return ret
3244
# Synthetic Binding covering the spec-defined default properties. It is
# not backed by a YAML file, hence the None path and the relaxed
# require_compatible/require_description flags. NOTE(review): the second
# positional argument '{}' is presumably the include-file name-to-path
# map -- confirm against the Binding constructor.
_DEFAULT_PROP_BINDING: Binding = Binding(
    None, {},
    raw={
        'properties': {
            name: _raw_default_property_for(name)
            for name in _DEFAULT_PROP_TYPES
        },
    },
    require_compatible=False, require_description=False,
)
3255
# One PropertySpec per spec-defined default property, all backed by
# _DEFAULT_PROP_BINDING, keyed by property name.
_DEFAULT_PROP_SPECS: Dict[str, PropertySpec] = {
    name: PropertySpec(name, _DEFAULT_PROP_BINDING)
    for name in _DEFAULT_PROP_TYPES
}
3260