1# Copyright (c) 2019 Nordic Semiconductor ASA
2# Copyright (c) 2019 Linaro Limited
3# SPDX-License-Identifier: BSD-3-Clause
4
5# Tip: You can view just the documentation with 'pydoc3 devicetree.edtlib'
6
7"""
8Library for working with devicetrees at a higher level compared to dtlib. Like
9dtlib, this library presents a tree of devicetree nodes, but the nodes are
10augmented with information from bindings and include some interpretation of
11properties. Some of this interpretation is based on conventions established
12by the Linux kernel, so the Documentation/devicetree/bindings in the Linux
13source code is sometimes good reference material.
14
15Bindings are YAML files that describe devicetree nodes. Devicetree
16nodes are usually mapped to bindings via their 'compatible = "..."' property,
17but a binding can also come from a 'child-binding:' key in the binding for the
18parent devicetree node.
19
20Each devicetree node (dtlib.Node) gets a corresponding edtlib.Node instance,
21which has all the information related to the node.
22
23The top-level entry points for the library are the EDT and Binding classes.
24See their constructor docstrings for details. There is also a
25bindings_from_paths() helper function.
26"""
27
28# NOTE: tests/test_edtlib.py is the test suite for this library.
29
30# Implementation notes
31# --------------------
32#
33# A '_' prefix on an identifier in Python is a convention for marking it private.
34# Please do not access private things. Instead, think of what API you need, and
35# add it.
36#
37# This module is not meant to have any global state. It should be possible to
38# create several EDT objects with independent binding paths and flags. If you
39# need to add a configuration parameter or the like, store it in the EDT
40# instance, and initialize it e.g. with a constructor argument.
41#
42# This library is layered on top of dtlib, and is not meant to expose it to
43# clients. This keeps the header generation script simple.
44#
45# General biased advice:
46#
47# - Consider using @property for APIs that don't need parameters. It makes
48#   functions look like attributes, which is less awkward in clients, and makes
49#   it easy to switch back and forth between variables and functions.
50#
51# - Think about the data type of the thing you're exposing. Exposing something
52#   as e.g. a list or a dictionary is often nicer and more flexible than adding
53#   a function.
54#
55# - Avoid get_*() prefixes on functions. Name them after the thing they return
56#   instead. This often makes the code read more naturally in callers.
57#
58#   Also, consider using @property instead of get_*().
59#
60# - Don't expose dtlib stuff directly.
61#
62# - Add documentation for any new APIs you add.
63#
64#   The convention here is that docstrings (quoted strings) are used for public
65#   APIs, and "doc comments" for internal functions.
66#
67#   @properties are documented in the class docstring, as if they were
68#   variables. See the existing @properties for a template.
69
70from collections import defaultdict
71from copy import deepcopy
72from dataclasses import dataclass
73from typing import Any, Callable, Dict, Iterable, List, NoReturn, \
74    Optional, Set, TYPE_CHECKING, Tuple, Union
75import logging
76import os
77import re
78
79import yaml
80try:
81    # Use the C LibYAML parser if available, rather than the Python parser.
82    # This makes e.g. gen_defines.py more than twice as fast.
83    from yaml import CLoader as Loader
84except ImportError:
85    from yaml import Loader     # type: ignore
86
87from devicetree.dtlib import DT, DTError, to_num, to_nums, Type
88from devicetree.dtlib import Node as dtlib_Node
89from devicetree.dtlib import Property as dtlib_Property
90from devicetree.grutils import Graph
91from devicetree._private import _slice_helper
92
93#
94# Public classes
95#
96
97
class Binding:
    """
    Represents a parsed binding.

    These attributes are available on Binding objects:

    path:
      The absolute path to the file defining the binding.

    description:
      The free-form description of the binding, or None.

    compatible:
      The compatible string the binding matches.

      This may be None. For example, it's None when the Binding is inferred
      from node properties. It can also be None for Binding objects created
      using 'child-binding:' with no compatible.

    prop2specs:
      A dict mapping property names to PropertySpec objects
      describing those properties' values.

    specifier2cells:
      A dict that maps specifier space names (like "gpio",
      "clock", "pwm", etc.) to lists of cell names.

      For example, if the binding YAML contains 'pin' and 'flags' cell names
      for the 'gpio' specifier space, like this:

          gpio-cells:
          - pin
          - flags

      Then the Binding object will have a 'specifier2cells' attribute mapping
      "gpio" to ["pin", "flags"]. A missing key should be interpreted as zero
      cells.

    raw:
      The binding as an object parsed from YAML.

    bus:
      If nodes with this binding's 'compatible' describe a bus, a string
      describing the bus type (like "i2c") or a list describing supported
      protocols (like ["i3c", "i2c"]). None otherwise.

      Note that this is the raw value from the binding where it can be
      a string or a list. Use "buses" instead unless you need the raw
      value, where "buses" is always a list.

    buses:
      Derived property from 'bus' where 'buses' is a list of bus(es),
      for example, ["i2c"] or ["i3c", "i2c"]. Or an empty list if there is
      no 'bus:' in this binding.

    on_bus:
      If nodes with this binding's 'compatible' appear on a bus, a string
      describing the bus type (like "i2c"). None otherwise.

    child_binding:
      If this binding describes the properties of child nodes, then
      this is a Binding object for those children; it is None otherwise.
      A Binding object's 'child_binding.child_binding' is not None if there
      are multiple levels of 'child-binding' descriptions in the binding.
    """

    def __init__(self, path: Optional[str], fname2path: Dict[str, str],
                 raw: Any = None, require_compatible: bool = True,
                 require_description: bool = True,
                 inc_allowlist: Optional[List[str]] = None,
                 inc_blocklist: Optional[List[str]] = None):
        """
        Binding constructor.

        path:
          Path to binding YAML file. May be None.

        fname2path:
          Map from include files to their absolute paths. Must
          not be None, but may be empty.

        raw:
          Optional raw content in the binding.
          This does not have to have any "include:" lines resolved.
          May be left out, in which case 'path' is opened and read.
          This can be used to resolve child bindings, for example.

        require_compatible:
          If True, it is an error if the binding does not contain a
          "compatible:" line. If False, a missing "compatible:" is
          not an error. Either way, "compatible:" must be a string
          if it is present in the binding.

        require_description:
          If True, it is an error if the binding does not contain a
          "description:" line. If False, a missing "description:" is
          not an error. Either way, "description:" must be a string
          if it is present in the binding.

        inc_allowlist:
          The property-allowlist filter set by including bindings.

        inc_blocklist:
          The property-blocklist filter set by including bindings.
        """
        self.path: Optional[str] = path
        self._fname2path: Dict[str, str] = fname2path

        self._inc_allowlist: Optional[List[str]] = inc_allowlist
        self._inc_blocklist: Optional[List[str]] = inc_blocklist

        if raw is None:
            if path is None:
                _err("you must provide either a 'path' or a 'raw' argument")
            with open(path, encoding="utf-8") as f:
                raw = yaml.load(f, Loader=_BindingLoader)

        # Get the properties this binding modifies
        # before we merge the included ones.
        last_modified_props = list(raw.get("properties", {}).keys())

        # Map property names to their specifications:
        # - first, _merge_includes() will recursively populate prop2specs with
        #   the properties specified by the included bindings
        # - eventually, we'll update prop2specs with the properties
        #   this binding itself defines or modifies
        self.prop2specs: Dict[str, 'PropertySpec'] = {}

        # Merge any included files into self.raw. This also pulls in
        # inherited child binding definitions, so it has to be done
        # before initializing those.
        self.raw: dict = self._merge_includes(raw, self.path)

        # Recursively initialize any child bindings. These don't
        # require a 'compatible' or 'description' to be well defined,
        # but they must be dicts.
        if "child-binding" in raw:
            if not isinstance(raw["child-binding"], dict):
                _err(f"malformed 'child-binding:' in {self.path}, "
                     "expected a binding (dictionary with keys/values)")
            self.child_binding: Optional['Binding'] = Binding(
                path, fname2path,
                raw=raw["child-binding"],
                require_compatible=False,
                require_description=False)
        else:
            self.child_binding = None

        # Make sure this is a well defined object.
        self._check(require_compatible, require_description)

        # Update specs with the properties this binding defines or modifies.
        for prop_name in last_modified_props:
            self.prop2specs[prop_name] = PropertySpec(prop_name, self)

        # Initialize look up tables.
        self.specifier2cells: Dict[str, List[str]] = {}
        for key, val in self.raw.items():
            if key.endswith("-cells"):
                self.specifier2cells[key[:-len("-cells")]] = val

    def __repr__(self) -> str:
        if self.compatible:
            compat = f" for compatible '{self.compatible}'"
        else:
            compat = ""
        basename = os.path.basename(self.path or "")
        return f"<Binding {basename}" + compat + ">"

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('description')

    @property
    def compatible(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('compatible')

    @property
    def bus(self) -> Union[None, str, List[str]]:
        "See the class docstring"
        return self.raw.get('bus')

    @property
    def buses(self) -> List[str]:
        "See the class docstring"
        if self.raw.get('bus') is not None:
            return self._buses
        else:
            return []

    @property
    def on_bus(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('on-bus')

    def _merge_includes(self, raw: dict, binding_path: Optional[str]) -> dict:
        # Constructor helper. Merges included files in
        # 'raw["include"]' into 'raw' using 'self._include_paths' as a
        # source of include files, removing the "include" key while
        # doing so.
        #
        # This treats 'binding_path' as the binding file being built up
        # and uses it for error messages.

        if "include" not in raw:
            return raw

        include = raw.pop("include")

        # First, merge the included files together. If more than one included
        # file has a 'required:' for a particular property, OR the values
        # together, so that 'required: true' wins.

        merged: Dict[str, Any] = {}

        if isinstance(include, str):
            # Simple scalar string case
            # Load YAML file and register property specs into prop2specs.
            inc_raw = self._load_raw(include, self._inc_allowlist,
                                     self._inc_blocklist)

            _merge_props(merged, inc_raw, None, binding_path, False)
        elif isinstance(include, list):
            # List of strings and maps. These types may be intermixed.
            for elem in include:
                if isinstance(elem, str):
                    # Load YAML file and register property specs into prop2specs.
                    inc_raw = self._load_raw(elem, self._inc_allowlist,
                                             self._inc_blocklist)

                    _merge_props(merged, inc_raw, None, binding_path, False)
                elif isinstance(elem, dict):
                    name = elem.pop('name', None)

                    # Merge this include property-allowlist filter
                    # with filters from including bindings.
                    allowlist = elem.pop('property-allowlist', None)
                    if allowlist is not None:
                        if self._inc_allowlist:
                            allowlist.extend(self._inc_allowlist)
                    else:
                        allowlist = self._inc_allowlist

                    # Merge this include property-blocklist filter
                    # with filters from including bindings.
                    blocklist = elem.pop('property-blocklist', None)
                    if blocklist is not None:
                        if self._inc_blocklist:
                            blocklist.extend(self._inc_blocklist)
                    else:
                        blocklist = self._inc_blocklist

                    child_filter = elem.pop('child-binding', None)

                    if elem:
                        # We've popped out all the valid keys.
                        _err(f"'include:' in {binding_path} should not have "
                             f"these unexpected contents: {elem}")

                    _check_include_dict(name, allowlist, blocklist,
                                        child_filter, binding_path)

                    # Load YAML file, and register (filtered) property specs
                    # into prop2specs.
                    contents = self._load_raw(name,
                                              allowlist, blocklist,
                                              child_filter)

                    _merge_props(merged, contents, None, binding_path, False)
                else:
                    _err(f"all elements in 'include:' in {binding_path} "
                         "should be either strings or maps with a 'name' key "
                         "and optional 'property-allowlist' or "
                         f"'property-blocklist' keys, but got: {elem}")
        else:
            # Invalid item.
            _err(f"'include:' in {binding_path} "
                 f"should be a string or list, but has type {type(include)}")

        # Next, merge the merged included files into 'raw'. Error out if
        # 'raw' has 'required: false' while the merged included files have
        # 'required: true'.

        _merge_props(raw, merged, None, binding_path, check_required=True)

        return raw

    def _load_raw(self, fname: str,
                  allowlist: Optional[List[str]] = None,
                  blocklist: Optional[List[str]] = None,
                  child_filter: Optional[dict] = None) -> dict:
        # Returns the contents of the binding given by 'fname' after merging
        # any bindings it lists in 'include:' into it, according to the given
        # property filters.
        #
        # Will also register the (filtered) included property specs
        # into prop2specs.

        path = self._fname2path.get(fname)

        if not path:
            _err(f"'{fname}' not found")

        with open(path, encoding="utf-8") as f:
            contents = yaml.load(f, Loader=_BindingLoader)
            if not isinstance(contents, dict):
                _err(f'{path}: invalid contents, expected a mapping')

        # Apply constraints to included YAML contents.
        _filter_properties(contents,
                           allowlist, blocklist,
                           child_filter, self.path)

        # Register included property specs.
        self._add_included_prop2specs(fname, contents, allowlist, blocklist)

        return self._merge_includes(contents, path)

    def _add_included_prop2specs(self, fname: str, contents: dict,
                                 allowlist: Optional[List[str]] = None,
                                 blocklist: Optional[List[str]] = None) -> None:
        # Registers the properties specified by an included binding file
        # into the properties this binding supports/requires (aka prop2specs).
        #
        # Consider "this" binding B includes I1 which itself includes I2.
        #
        # We assume to be called in that order:
        # 1) _add_included_prop2spec(B, I1)
        # 2) _add_included_prop2spec(B, I2)
        #
        # Where we don't want I2 "taking ownership" for properties
        # modified by I1.
        #
        # So we:
        # - first create a binding that represents the included file
        # - then add the property specs defined by this binding to prop2specs,
        #   without overriding the specs modified by an including binding
        #
        # Note: Unfortunately, we can't cache these base bindings,
        # as a same YAML file may be included with different filters
        # (property-allowlist and such), leading to different contents.

        inc_binding = Binding(
            self._fname2path[fname],
            self._fname2path,
            contents,
            require_compatible=False,
            require_description=False,
            # Recursively pass filters to included bindings.
            inc_allowlist=allowlist,
            inc_blocklist=blocklist,
        )

        for prop, spec in inc_binding.prop2specs.items():
            if prop not in self.prop2specs:
                self.prop2specs[prop] = spec

    def _check(self, require_compatible: bool, require_description: bool):
        # Does sanity checking on the binding.

        raw = self.raw

        if "compatible" in raw:
            compatible = raw["compatible"]
            if not isinstance(compatible, str):
                _err(f"malformed 'compatible: {compatible}' "
                     f"field in {self.path} - "
                     f"should be a string, not {type(compatible).__name__}")
        elif require_compatible:
            _err(f"missing 'compatible' in {self.path}")

        if "description" in raw:
            description = raw["description"]
            if not isinstance(description, str) or not description:
                _err(f"malformed or empty 'description' in {self.path}")
        elif require_description:
            _err(f"missing 'description' in {self.path}")

        # Allowed top-level keys. The 'include' key should have been
        # removed by _load_raw() already.
        ok_top = {"description", "compatible", "bus", "on-bus",
                  "properties", "child-binding"}

        # Descriptive errors for legacy bindings.
        legacy_errors = {
            "#cells": "expected *-cells syntax",
            "child": "use 'bus: <bus>' instead",
            "child-bus": "use 'bus: <bus>' instead",
            "parent": "use 'on-bus: <bus>' instead",
            "parent-bus": "use 'on-bus: <bus>' instead",
            "sub-node": "use 'child-binding' instead",
            "title": "use 'description' instead",
        }

        for key in raw:
            if key in legacy_errors:
                _err(f"legacy '{key}:' in {self.path}, {legacy_errors[key]}")

            if key not in ok_top and not key.endswith("-cells"):
                # Both pieces must be f-strings so that ok_top is
                # actually interpolated into the message.
                _err(f"unknown key '{key}' in {self.path}, "
                     f"expected one of {', '.join(ok_top)}, or *-cells")

        if "bus" in raw:
            bus = raw["bus"]
            # 'bus:' must be either a string or a list of strings. Note the
            # 'or' here: a list is malformed when ANY element is a non-string.
            if not isinstance(bus, str) and \
               (not isinstance(bus, list) or
                not all(isinstance(elem, str) for elem in bus)):
                _err(f"malformed 'bus:' value in {self.path}, "
                     "expected string or list of strings")

            if isinstance(bus, list):
                self._buses = bus
            else:
                # Convert bus into a list
                self._buses = [bus]

        if "on-bus" in raw and \
           not isinstance(raw["on-bus"], str):
            _err(f"malformed 'on-bus:' value in {self.path}, "
                 "expected string")

        self._check_properties()

        for key, val in raw.items():
            if key.endswith("-cells"):
                if not isinstance(val, list) or \
                   not all(isinstance(elem, str) for elem in val):
                    _err(f"malformed '{key}:' in {self.path}, "
                         "expected a list of strings")

    def _check_properties(self) -> None:
        # _check() helper for checking the contents of 'properties:'.

        raw = self.raw

        if "properties" not in raw:
            return

        ok_prop_keys = {"description", "type", "required",
                        "enum", "const", "default", "deprecated",
                        "specifier-space"}

        for prop_name, options in raw["properties"].items():
            for key in options:
                if key not in ok_prop_keys:
                    _err(f"unknown setting '{key}' in "
                         f"'properties: {prop_name}: ...' in {self.path}, "
                         f"expected one of {', '.join(ok_prop_keys)}")

            _check_prop_by_type(prop_name, options, self.path)

            for true_false_opt in ["required", "deprecated"]:
                if true_false_opt in options:
                    option = options[true_false_opt]
                    if not isinstance(option, bool):
                        _err(f"malformed '{true_false_opt}:' setting '{option}' "
                             f"for '{prop_name}' in 'properties' in {self.path}, "
                             "expected true/false")

            if options.get("deprecated") and options.get("required"):
                _err(f"'{prop_name}' in 'properties' in {self.path} should not "
                      "have both 'deprecated' and 'required' set")

            if "description" in options and \
               not isinstance(options["description"], str):
                _err("missing, malformed, or empty 'description' for "
                     f"'{prop_name}' in 'properties' in {self.path}")

            if "enum" in options and not isinstance(options["enum"], list):
                _err(f"enum in {self.path} for property '{prop_name}' "
                     "is not a list")
572
573
class PropertySpec:
    """
    Represents a "property specification": the description of a property
    provided by a binding file, such as its type and description.

    These attributes are available on PropertySpec objects:

    binding:
      The Binding object which defined this property.

    name:
      The property's name.

    path:
      The file where this property was defined. In case a binding includes
      other bindings, this is the file where the property was last modified.

    type:
      The type of the property as a string, as given in the binding.

    description:
      The free-form description of the property as a string, or None.

    enum:
      A list of values the property may take as given in the binding, or None.

    enum_tokenizable:
      True if enum is not None and all the values in it are tokenizable;
      False otherwise.

      A property must have string type and an "enum:" in its binding to be
      tokenizable. Additionally, the "enum:" values must be unique after
      converting all non-alphanumeric characters to underscores (so "foo bar"
      and "foo_bar" in the same "enum:" would not be tokenizable).

    enum_upper_tokenizable:
      Like 'enum_tokenizable', with the additional restriction that the
      "enum:" values must be unique after uppercasing and converting
      non-alphanumeric characters to underscores.

    const:
      The property's constant value as given in the binding, or None.

    default:
      The property's default value as given in the binding, or None.

    deprecated:
      True if the property is deprecated; False otherwise.

    required:
      True if the property is marked required; False otherwise.

    specifier_space:
      The specifier space for the property as given in the binding, or None.
    """

    def __init__(self, name: str, binding: Binding):
        self.binding: Binding = binding
        self.name: str = name
        # The raw 'properties: <name>: ...' mapping from the binding.
        self._raw: Dict[str, Any] = self.binding.raw["properties"][name]

    def __repr__(self) -> str:
        return f"<PropertySpec {self.name} type '{self.type}'>"

    @property
    def path(self) -> Optional[str]:
        "See the class docstring"
        return self.binding.path

    @property
    def type(self) -> str:
        "See the class docstring"
        return self._raw["type"]

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("description")

    @property
    def enum(self) -> Optional[list]:
        "See the class docstring"
        return self._raw.get("enum")

    @property
    def enum_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily and cached on first access.
        if hasattr(self, '_enum_tokenizable'):
            return self._enum_tokenizable

        if self.type != 'string' or self.enum is None:
            self._enum_tokenizable = False
            return self._enum_tokenizable

        # Keep the underscored forms around in _as_tokens so that
        # enum_upper_tokenizable can reuse them.
        self._as_tokens = [re.sub(_NOT_ALPHANUM_OR_UNDERSCORE, '_', value)
                           for value in self.enum]
        # Tokenizable iff no two enum values collapse to the same token.
        self._enum_tokenizable = \
            len(self._as_tokens) == len(set(self._as_tokens))
        return self._enum_tokenizable

    @property
    def enum_upper_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily and cached on first access.
        if hasattr(self, '_enum_upper_tokenizable'):
            return self._enum_upper_tokenizable

        if not self.enum_tokenizable:
            self._enum_upper_tokenizable = False
        else:
            # Unique after uppercasing, too?
            uppercased = set(token.upper() for token in self._as_tokens)
            self._enum_upper_tokenizable = \
                len(self._as_tokens) == len(uppercased)
        return self._enum_upper_tokenizable

    @property
    def const(self) -> Union[None, int, List[int], str, List[str]]:
        "See the class docstring"
        return self._raw.get("const")

    @property
    def default(self) -> Union[None, int, List[int], str, List[str]]:
        "See the class docstring"
        return self._raw.get("default")

    @property
    def required(self) -> bool:
        "See the class docstring"
        return self._raw.get("required", False)

    @property
    def deprecated(self) -> bool:
        "See the class docstring"
        return self._raw.get("deprecated", False)

    @property
    def specifier_space(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("specifier-space")
711
# Type alias for Property.val: the Python value of a devicetree property,
# whose concrete type depends on the 'type:' key in the binding (see the
# Property class docstring). Scalars and lists cover int/string(-array)
# types; Node and List[Node] cover phandle/path/phandles; the
# ControllerAndData list covers phandle-array. None is used when the
# property is absent. bytes presumably corresponds to a byte-array
# property type -- TODO confirm against the code that builds Property.val.
PropertyValType = Union[int, str,
                        List[int], List[str],
                        'Node', List['Node'],
                        List[Optional['ControllerAndData']],
                        bytes, None]
717
718
@dataclass
class Property:
    """
    Represents a property on a Node, as set in its DT node and with
    additional info from the 'properties:' section of the binding.

    Only properties mentioned in 'properties:' get created. Properties of type
    'compound' currently do not get Property instances, as it's not clear
    what to generate for them.

    These attributes are available on Property objects. Several are
    just convenience accessors for attributes on the PropertySpec object
    accessible via the 'spec' attribute.

    These attributes are available on Property objects:

    spec:
      The PropertySpec object which specifies this property.

    val:
      The value of the property, with the format determined by spec.type,
      which comes from the 'type:' string in the binding.

        - For 'type: int/array/string/string-array', 'val' is what you'd expect
          (a Python integer or string, or a list of them)

        - For 'type: phandle' and 'type: path', 'val' is the pointed-to Node
          instance

        - For 'type: phandles', 'val' is a list of the pointed-to Node
          instances

        - For 'type: phandle-array', 'val' is a list of ControllerAndData
          instances. See the documentation for that class.

    node:
      The Node instance the property is on

    name:
      Convenience for spec.name.

    description:
      Convenience for spec.description with leading and trailing whitespace
      (including newlines) removed. May be None.

    type:
      Convenience for spec.type.

    val_as_token:
      The value of the property as a token, i.e. with non-alphanumeric
      characters replaced with underscores. This is only safe to access
      if 'spec.enum_tokenizable' returns True.

    enum_index:
      The index of 'val' in 'spec.enum' (which comes from the 'enum:' list
      in the binding), or None if spec.enum is None.
    """

    spec: PropertySpec
    val: PropertyValType
    node: 'Node'

    @property
    def name(self) -> str:
        "See the class docstring"
        return self.spec.name

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        # Strip surrounding whitespace, but pass None through untouched.
        desc = self.spec.description
        return desc.strip() if desc else None

    @property
    def type(self) -> str:
        "See the class docstring"
        return self.spec.type

    @property
    def val_as_token(self) -> str:
        "See the class docstring"
        # Only meaningful for string-valued properties; see the docstring.
        assert isinstance(self.val, str)
        return str_as_token(self.val)

    @property
    def enum_index(self) -> Optional[int]:
        "See the class docstring"
        # Note: a missing (or empty) 'enum:' yields None rather than an error.
        values = self.spec.enum
        if not values:
            return None
        return values.index(self.val)
807
808
@dataclass
class Register:
    """
    Describes one entry in a node's 'reg' property.

    These attributes are available on Register objects:

    node:
      The Node instance this register belongs to

    name:
      The register's name, taken from the 'reg-names' property, or None
      when the node has no 'reg-names' property

    addr:
      The register's start address in the parent address space, or None
      when #address-cells is zero. Any 'ranges' properties are taken into
      account.

    size:
      The register's length in bytes
    """

    node: 'Node'
    name: Optional[str]
    addr: Optional[int]
    size: Optional[int]
835
836
@dataclass
class Range:
    """
    Describes one address translation entry from a node's 'ranges' property.

    These attributes are available on Range objects:

    node:
      The Node instance this range is from

    child_bus_cells:
      The number of cells used to describe a child bus address.

    child_bus_addr:
      A physical address within the child bus address space, or None if the
      child's #address-cells equals 0.

    parent_bus_cells:
      The number of cells used to describe a parent bus address.

    parent_bus_addr:
      A physical address within the parent bus address space, or None if the
      parent's #address-cells equals 0.

    length_cells:
      The number of cells used to describe the size of range in
      the child's address space.

    length:
      The size of the range in the child address space, or None if the
      child's #size-cells equals 0.
    """
    node: 'Node'
    child_bus_cells: int
    child_bus_addr: Optional[int]
    parent_bus_cells: int
    parent_bus_addr: Optional[int]
    length_cells: int
    length: Optional[int]
876
877
@dataclass
class ControllerAndData:
    """
    One entry in an 'interrupts' or 'type: phandle-array' property value,
    e.g. <&ctrl-1 4 0> in

        cs-gpios = <&ctrl-1 4 0 &ctrl-2 3 4>;

    These attributes are available on ControllerAndData objects:

    node:
      The Node instance the property appears on

    controller:
      The Node instance for the controller (e.g. the controller the interrupt
      gets sent to for interrupts)

    data:
      A dictionary that maps names from the *-cells key in the binding for the
      controller to data values, e.g. {"pin": 4, "flags": 0} for the example
      above.

      'interrupts = <1 2>' might give {"irq": 1, "level": 2}.

    name:
      The name of the entry as given in
      'interrupt-names'/'gpio-names'/'pwm-names'/etc., or None if there is no
      *-names property

    basename:
      Basename for the controller when supporting named cells
    """
    node: 'Node'
    controller: 'Node'
    data: dict
    name: Optional[str]
    basename: Optional[str]
915
916
@dataclass
class PinCtrl:
    """
    One pin control configuration for a set of pins on a device, e.g.
    pinctrl-0 or pinctrl-1.

    These attributes are available on PinCtrl objects:

    node:
      The Node instance the pinctrl-* property is on

    name:
      The name of the configuration, as given in pinctrl-names, or None if
      there is no pinctrl-names property

    name_as_token:
      Like 'name', but with non-alphanumeric characters converted to underscores.

    conf_nodes:
      A list of Node instances for the pin configuration nodes, e.g.
      the nodes pointed at by &state_1 and &state_2 in

          pinctrl-0 = <&state_1 &state_2>;
    """

    node: 'Node'
    name: Optional[str]
    conf_nodes: List['Node']

    @property
    def name_as_token(self):
        """See the class docstring."""
        name = self.name
        if name is None:
            return None
        return str_as_token(name)
950
951
class Node:
    """
    Represents a devicetree node, augmented with information from bindings, and
    with some interpretation of devicetree properties. There's a one-to-one
    correspondence between devicetree nodes and Nodes.

    These attributes are available on Node objects:

    edt:
      The EDT instance this node is from

    name:
      The name of the node

    unit_addr:
      An integer with the ...@<unit-address> portion of the node name,
      translated through any 'ranges' properties on parent nodes, or None if
      the node name has no unit-address portion. PCI devices use a different
      node name format ...@<dev>,<func> or ...@<dev> (e.g. "pcie@1,0"), in
      this case None is returned.

    description:
      The description string from the binding for the node, or None if the node
      has no binding. Leading and trailing whitespace (including newlines) is
      removed.

    path:
      The devicetree path of the node

    label:
      The text from the 'label' property on the node, or None if the node has
      no 'label'

    labels:
      A list of all of the devicetree labels for the node, in the same order
      as the labels appear, but with duplicates removed.

      This corresponds to the actual devicetree source labels, unlike the
      "label" attribute, which is the value of a devicetree property named
      "label".

    parent:
      The Node instance for the devicetree parent of the Node, or None if the
      node is the root node

    children:
      A dictionary with the Node instances for the devicetree children of the
      node, indexed by name

    dep_ordinal:
      A non-negative integer value such that the value for a Node is
      less than the value for all Nodes that depend on it.

      The ordinal is defined for all Nodes, and is unique among nodes in its
      EDT 'nodes' list.

    required_by:
      A list with the nodes that directly depend on the node

    depends_on:
      A list with the nodes that the node directly depends on

    status:
      The node's status property value, as a string, or "okay" if the node
      has no status property set. If the node's status property is "ok",
      it is converted to "okay" for consistency.

    read_only:
      True if the node has a 'read-only' property, and False otherwise

    matching_compat:
      The 'compatible' string for the binding that matched the node, or None if
      the node has no binding

    binding_path:
      The path to the binding file for the node, or None if the node has no
      binding

    compats:
      A list of 'compatible' strings for the node, in the same order that
      they're listed in the .dts file

    ranges:
      A list of Range objects extracted from the node's ranges property.
      The list is empty if the node does not have a 'ranges' property.

    regs:
      A list of Register objects for the node's registers

    props:
      A dict that maps property names to Property objects.
      Property objects are created for all devicetree properties on the node
      that are mentioned in 'properties:' in the binding.

    aliases:
      A list of aliases for the node. This is fetched from the /aliases node.

    interrupts:
      A list of ControllerAndData objects for the interrupts generated by the
      node. The list is empty if the node does not generate interrupts.

    pinctrls:
      A list of PinCtrl objects for the pinctrl-<index> properties on the
      node, sorted by index. The list is empty if the node does not have any
      pinctrl-<index> properties.

    buses:
      If the node is a bus node (has a 'bus:' key in its binding), then this
      attribute holds the list of supported bus types, e.g. ["i2c"], ["spi"]
      or ["i3c", "i2c"] if multiple protocols are supported via the same bus.
      If the node is not a bus node, then this attribute is an empty list.

    on_buses:
      The bus the node appears on, e.g. ["i2c"], ["spi"] or ["i3c", "i2c"] if
      multiple protocols are supported via the same bus. The bus is determined
      by searching upwards for a parent node whose binding has a 'bus:' key,
      returning the value of the first 'bus:' key found. If none of the node's
      parents has a 'bus:' key, this attribute is an empty list.

    bus_node:
      Like on_buses, but contains the Node for the bus controller, or None if
      the node is not on a bus.

    flash_controller:
      The flash controller for the node. Only meaningful for nodes representing
      flash partitions.

    spi_cs_gpio:
      The device's SPI GPIO chip select as a ControllerAndData instance, if it
      exists, and None otherwise. See
      Documentation/devicetree/bindings/spi/spi-controller.yaml in the Linux kernel.

    gpio_hogs:
      A list of ControllerAndData objects for the GPIOs hogged by the node. The
      list is empty if the node does not hog any GPIOs. Only relevant for GPIO hog
      nodes.

    is_pci_device:
      True if the node is a PCI device.
    """
1092
    def __init__(self,
                 dt_node: dtlib_Node,
                 edt: 'EDT',
                 compats: List[str]):
        '''
        For internal use only; not meant to be used outside edtlib itself.
        '''
        # Public attributes (see the class docstring). Some hold placeholder
        # values here and are properly initialized later during EDT setup
        # (e.g. dep_ordinal, matching_compat, binding_path, bus_node).
        self.edt: 'EDT' = edt
        self.dep_ordinal: int = -1  # assigned during dependency ordering
        self.matching_compat: Optional[str] = None  # set by _init_binding()
        self.binding_path: Optional[str] = None  # set by _init_binding()
        self.compats: List[str] = compats
        self.ranges: List[Range] = []  # filled in by _init_ranges()
        self.regs: List[Register] = []
        self.props: Dict[str, Property] = {}  # filled in by _init_props()
        self.interrupts: List[ControllerAndData] = []
        self.pinctrls: List[PinCtrl] = []
        self.bus_node: Optional['Node'] = None  # set via _bus_node()

        # Private, don't touch outside the class:
        self._node: dtlib_Node = dt_node  # underlying dtlib node
        self._binding: Optional[Binding] = None  # set by _init_binding()
1116
1117    @property
1118    def name(self) -> str:
1119        "See the class docstring"
1120        return self._node.name
1121
1122    @property
1123    def unit_addr(self) -> Optional[int]:
1124        "See the class docstring"
1125
1126        # TODO: Return a plain string here later, like dtlib.Node.unit_addr?
1127
1128        # PCI devices use a different node name format (e.g. "pcie@1,0")
1129        if "@" not in self.name or self.is_pci_device:
1130            return None
1131
1132        try:
1133            addr = int(self.name.split("@", 1)[1], 16)
1134        except ValueError:
1135            _err(f"{self!r} has non-hex unit address")
1136
1137        return _translate(addr, self._node)
1138
1139    @property
1140    def description(self) -> Optional[str]:
1141        "See the class docstring."
1142        if self._binding:
1143            return self._binding.description
1144        return None
1145
1146    @property
1147    def path(self) ->  str:
1148        "See the class docstring"
1149        return self._node.path
1150
1151    @property
1152    def label(self) -> Optional[str]:
1153        "See the class docstring"
1154        if "label" in self._node.props:
1155            return self._node.props["label"].to_string()
1156        return None
1157
1158    @property
1159    def labels(self) -> List[str]:
1160        "See the class docstring"
1161        return self._node.labels
1162
1163    @property
1164    def parent(self) -> Optional['Node']:
1165        "See the class docstring"
1166        return self.edt._node2enode.get(self._node.parent) # type: ignore
1167
1168    @property
1169    def children(self) -> Dict[str, 'Node']:
1170        "See the class docstring"
1171        # Could be initialized statically too to preserve identity, but not
1172        # sure if needed. Parent nodes being initialized before their children
1173        # would need to be kept in mind.
1174        return {name: self.edt._node2enode[node]
1175                for name, node in self._node.nodes.items()}
1176
1177    def child_index(self, node) -> int:
1178        """Get the index of *node* in self.children.
1179        Raises KeyError if the argument is not a child of this node.
1180        """
1181        if not hasattr(self, '_child2index'):
1182            # Defer initialization of this lookup table until this
1183            # method is callable to handle parents needing to be
1184            # initialized before their chidlren. By the time we
1185            # return from __init__, 'self.children' is callable.
1186            self._child2index: Dict[str, int] = {}
1187            for index, child_path in enumerate(child.path for child in
1188                                               self.children.values()):
1189                self._child2index[child_path] = index
1190
1191        return self._child2index[node.path]
1192
1193    @property
1194    def required_by(self) -> List['Node']:
1195        "See the class docstring"
1196        return self.edt._graph.required_by(self)
1197
1198    @property
1199    def depends_on(self) -> List['Node']:
1200        "See the class docstring"
1201        return self.edt._graph.depends_on(self)
1202
1203    @property
1204    def status(self) -> str:
1205        "See the class docstring"
1206        status = self._node.props.get("status")
1207
1208        if status is None:
1209            as_string = "okay"
1210        else:
1211            as_string = status.to_string()
1212
1213        if as_string == "ok":
1214            as_string = "okay"
1215
1216        return as_string
1217
1218    @property
1219    def read_only(self) -> bool:
1220        "See the class docstring"
1221        return "read-only" in self._node.props
1222
1223    @property
1224    def aliases(self) -> List[str]:
1225        "See the class docstring"
1226        return [alias for alias, node in self._node.dt.alias2node.items()
1227                if node is self._node]
1228
1229    @property
1230    def buses(self) -> List[str]:
1231        "See the class docstring"
1232        if self._binding:
1233            return self._binding.buses
1234        return []
1235
1236    @property
1237    def on_buses(self) -> List[str]:
1238        "See the class docstring"
1239        bus_node = self.bus_node
1240        return bus_node.buses if bus_node else []
1241
1242    @property
1243    def flash_controller(self) -> 'Node':
1244        "See the class docstring"
1245
1246        # The node path might be something like
1247        # /flash-controller@4001E000/flash@0/partitions/partition@fc000. We go
1248        # up two levels to get the flash and check its compat. The flash
1249        # controller might be the flash itself (for cases like NOR flashes).
1250        # For the case of 'soc-nv-flash', we assume the controller is the
1251        # parent of the flash node.
1252
1253        if not self.parent or not self.parent.parent:
1254            _err(f"flash partition {self!r} lacks parent or grandparent node")
1255
1256        controller = self.parent.parent
1257        if controller.matching_compat == "soc-nv-flash":
1258            if controller.parent is None:
1259                _err(f"flash controller '{controller.path}' cannot be the root node")
1260            return controller.parent
1261        return controller
1262
1263    @property
1264    def spi_cs_gpio(self) -> Optional[ControllerAndData]:
1265        "See the class docstring"
1266
1267        if not ("spi" in self.on_buses
1268                and self.bus_node
1269                and "cs-gpios" in self.bus_node.props):
1270            return None
1271
1272        if not self.regs:
1273            _err(f"{self!r} needs a 'reg' property, to look up the "
1274                 "chip select index for SPI")
1275
1276        parent_cs_lst = self.bus_node.props["cs-gpios"].val
1277        if TYPE_CHECKING:
1278            assert isinstance(parent_cs_lst, list)
1279
1280        # cs-gpios is indexed by the unit address
1281        cs_index = self.regs[0].addr
1282        if TYPE_CHECKING:
1283            assert isinstance(cs_index, int)
1284
1285        if cs_index >= len(parent_cs_lst):
1286            _err(f"index from 'regs' in {self!r} ({cs_index}) "
1287                 "is >= number of cs-gpios in "
1288                 f"{self.bus_node!r} ({len(parent_cs_lst)})")
1289
1290        ret = parent_cs_lst[cs_index]
1291        if TYPE_CHECKING:
1292            assert isinstance(ret, ControllerAndData)
1293        return ret
1294
1295    @property
1296    def gpio_hogs(self) -> List[ControllerAndData]:
1297        "See the class docstring"
1298
1299        if "gpio-hog" not in self.props:
1300            return []
1301
1302        if not self.parent or not "gpio-controller" in self.parent.props:
1303            _err(f"GPIO hog {self!r} lacks parent GPIO controller node")
1304
1305        if not "#gpio-cells" in self.parent._node.props:
1306            _err(f"GPIO hog {self!r} parent node lacks #gpio-cells")
1307
1308        n_cells = self.parent._node.props["#gpio-cells"].to_num()
1309        res = []
1310
1311        for item in _slice(self._node, "gpios", 4*n_cells,
1312                           f"4*(<#gpio-cells> (= {n_cells})"):
1313            controller = self.parent
1314            res.append(ControllerAndData(
1315                node=self, controller=controller,
1316                data=self._named_cells(controller, item, "gpio"),
1317                name=None, basename="gpio"))
1318
1319        return res
1320
1321    @property
1322    def is_pci_device(self) -> bool:
1323        "See the class docstring"
1324        return 'pcie' in self.on_buses
1325
1326    def __repr__(self) -> str:
1327        if self.binding_path:
1328            binding = "binding " + self.binding_path
1329        else:
1330            binding = "no binding"
1331        return f"<Node {self.path} in '{self.edt.dts_path}', {binding}>"
1332
    def _init_binding(self) -> None:
        # Initializes Node.matching_compat, Node._binding, and
        # Node.binding_path.
        #
        # Node._binding holds the data from the node's binding file, in the
        # format returned by PyYAML (plain Python lists, dicts, etc.), or None
        # if the node has no binding.

        # This relies on the parent of the node having already been
        # initialized, which is guaranteed by going through the nodes in
        # node_iter() order.

        if self.path in self.edt._infer_binding_for_paths:
            # The binding is synthesized from the properties on the node
            # instead of being looked up via 'compatible'.
            self._binding_from_properties()
            return

        if self.compats:
            on_buses = self.on_buses

            for compat in self.compats:
                # When matching, respect the order of the 'compatible' entries,
                # and for each one first try to match against an explicitly
                # specified bus (if any) and then against any bus. This is so
                # that matching against bindings which do not specify a bus
                # works the same way in Zephyr as it does elsewhere.
                binding = None

                for bus in on_buses:
                    if (compat, bus) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, bus]
                        break

                if not binding:
                    if (compat, None) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, None]
                    else:
                        # No binding for this compatible; try the next one.
                        continue

                # First match wins.
                self.binding_path = binding.path
                self.matching_compat = compat
                self._binding = binding
                return
        else:
            # No 'compatible' property. See if the parent binding has
            # a compatible. This can come from one or more levels of
            # nesting with 'child-binding:'.

            binding_from_parent = self._binding_from_parent()
            if binding_from_parent:
                self._binding = binding_from_parent
                self.binding_path = self._binding.path
                self.matching_compat = self._binding.compatible

                return

        # No binding found
        self._binding = self.binding_path = self.matching_compat = None
1390
1391    def _binding_from_properties(self) -> None:
1392        # Sets up a Binding object synthesized from the properties in the node.
1393
1394        if self.compats:
1395            _err(f"compatible in node with inferred binding: {self.path}")
1396
1397        # Synthesize a 'raw' binding as if it had been parsed from YAML.
1398        raw: Dict[str, Any] = {
1399            'description': 'Inferred binding from properties, via edtlib.',
1400            'properties': {},
1401        }
1402        for name, prop in self._node.props.items():
1403            pp: Dict[str, str] = {}
1404            if prop.type == Type.EMPTY:
1405                pp["type"] = "boolean"
1406            elif prop.type == Type.BYTES:
1407                pp["type"] = "uint8-array"
1408            elif prop.type == Type.NUM:
1409                pp["type"] = "int"
1410            elif prop.type == Type.NUMS:
1411                pp["type"] = "array"
1412            elif prop.type == Type.STRING:
1413                pp["type"] = "string"
1414            elif prop.type == Type.STRINGS:
1415                pp["type"] = "string-array"
1416            elif prop.type == Type.PHANDLE:
1417                pp["type"] = "phandle"
1418            elif prop.type == Type.PHANDLES:
1419                pp["type"] = "phandles"
1420            elif prop.type == Type.PHANDLES_AND_NUMS:
1421                pp["type"] = "phandle-array"
1422            elif prop.type == Type.PATH:
1423                pp["type"] = "path"
1424            else:
1425                _err(f"cannot infer binding from property: {prop} "
1426                     f"with type {prop.type!r}")
1427            raw['properties'][name] = pp
1428
1429        # Set up Node state.
1430        self.binding_path = None
1431        self.matching_compat = None
1432        self.compats = []
1433        self._binding = Binding(None, {}, raw=raw, require_compatible=False)
1434
1435    def _binding_from_parent(self) -> Optional[Binding]:
1436        # Returns the binding from 'child-binding:' in the parent node's
1437        # binding.
1438
1439        if not self.parent:
1440            return None
1441
1442        pbinding = self.parent._binding
1443        if not pbinding:
1444            return None
1445
1446        if pbinding.child_binding:
1447            return pbinding.child_binding
1448
1449        return None
1450
1451    def _bus_node(self, support_fixed_partitions_on_any_bus: bool = True
1452                  ) -> Optional['Node']:
1453        # Returns the value for self.bus_node. Relies on parent nodes being
1454        # initialized before their children.
1455
1456        if not self.parent:
1457            # This is the root node
1458            return None
1459
1460        # Treat 'fixed-partitions' as if they are not on any bus.  The reason is
1461        # that flash nodes might be on a SPI or controller or SoC bus.  Having
1462        # bus be None means we'll always match the binding for fixed-partitions
1463        # also this means want processing the fixed-partitions node we wouldn't
1464        # try to do anything bus specific with it.
1465        if support_fixed_partitions_on_any_bus and "fixed-partitions" in self.compats:
1466            return None
1467
1468        if self.parent.buses:
1469            # The parent node is a bus node
1470            return self.parent
1471
1472        # Same bus node as parent (possibly None)
1473        return self.parent.bus_node
1474
1475    def _init_props(self, default_prop_types: bool = False,
1476                    err_on_deprecated: bool = False) -> None:
1477        # Creates self.props. See the class docstring. Also checks that all
1478        # properties on the node are declared in its binding.
1479
1480        self.props = {}
1481
1482        node = self._node
1483        if self._binding:
1484            prop2specs = self._binding.prop2specs
1485        else:
1486            prop2specs = None
1487
1488        # Initialize self.props
1489        if prop2specs:
1490            for prop_spec in prop2specs.values():
1491                self._init_prop(prop_spec, err_on_deprecated)
1492            self._check_undeclared_props()
1493        elif default_prop_types:
1494            for name in node.props:
1495                if name not in _DEFAULT_PROP_SPECS:
1496                    continue
1497                prop_spec = _DEFAULT_PROP_SPECS[name]
1498                val = self._prop_val(name, prop_spec.type, False, False, None,
1499                                     None, err_on_deprecated)
1500                self.props[name] = Property(prop_spec, val, self)
1501
1502    def _init_prop(self, prop_spec: PropertySpec,
1503                   err_on_deprecated: bool) -> None:
1504        # _init_props() helper for initializing a single property.
1505        # 'prop_spec' is a PropertySpec object from the node's binding.
1506
1507        name = prop_spec.name
1508        prop_type = prop_spec.type
1509        if not prop_type:
1510            _err(f"'{name}' in {self.binding_path} lacks 'type'")
1511
1512        val = self._prop_val(name, prop_type, prop_spec.deprecated,
1513                             prop_spec.required, prop_spec.default,
1514                             prop_spec.specifier_space, err_on_deprecated)
1515
1516        if val is None:
1517            # 'required: false' property that wasn't there, or a property type
1518            # for which we store no data.
1519            return
1520
1521        enum = prop_spec.enum
1522        if enum and val not in enum:
1523            _err(f"value of property '{name}' on {self.path} in "
1524                 f"{self.edt.dts_path} ({val!r}) is not in 'enum' list in "
1525                 f"{self.binding_path} ({enum!r})")
1526
1527        const = prop_spec.const
1528        if const is not None and val != const:
1529            _err(f"value of property '{name}' on {self.path} in "
1530                 f"{self.edt.dts_path} ({val!r}) "
1531                 "is different from the 'const' value specified in "
1532                 f"{self.binding_path} ({const!r})")
1533
1534        # Skip properties that start with '#', like '#size-cells', and mapping
1535        # properties like 'gpio-map'/'interrupt-map'
1536        if name[0] == "#" or name.endswith("-map"):
1537            return
1538
1539        self.props[name] = Property(prop_spec, val, self)
1540
    def _prop_val(self, name: str, prop_type: str,
                  deprecated: bool, required: bool,
                  default: PropertyValType,
                  specifier_space: Optional[str],
                  err_on_deprecated: bool) -> PropertyValType:
        # _init_prop() helper for getting the property's value
        #
        # name:
        #   Property name from binding
        #
        # prop_type:
        #   Property type from binding (a string like "int")
        #
        # deprecated:
        #   True if the property is deprecated
        #
        # required:
        #   True if the property is required to exist
        #
        # default:
        #   Default value to use when the property doesn't exist, or None if
        #   the binding doesn't give a default value
        #
        # specifier_space:
        #   Property specifier-space from binding (if prop_type is "phandle-array")
        #
        # err_on_deprecated:
        #   If True, a deprecated property is an error instead of warning.
        #
        # Returns the value converted per 'prop_type', None when an optional
        # property is missing (and has no default) or has 'type: compound',
        # and False for a missing boolean.

        node = self._node
        prop = node.props.get(name)

        if prop and deprecated:
            msg = (f"'{name}' is marked as deprecated in 'properties:' "
                   f"in {self.binding_path} for node {node.path}.")
            if err_on_deprecated:
                _err(msg)
            else:
                _LOG.warning(msg)

        if not prop:
            # Property not present on the node. Only an error if it is
            # required on an enabled ("okay") node.
            if required and self.status == "okay":
                _err(f"'{name}' is marked as required in 'properties:' in "
                     f"{self.binding_path}, but does not appear in {node!r}")

            if default is not None:
                # YAML doesn't have a native format for byte arrays. We need to
                # convert those from an array like [0x12, 0x34, ...]. The
                # format has already been checked in
                # _check_prop_by_type().
                if prop_type == "uint8-array":
                    return bytes(default) # type: ignore
                return default

            return False if prop_type == "boolean" else None

        if prop_type == "boolean":
            # Booleans must be empty properties; a value is a binding error.
            if prop.type != Type.EMPTY:
                _err("'{0}' in {1!r} is defined with 'type: boolean' in {2}, "
                     "but is assigned a value ('{3}') instead of being empty "
                     "('{0};')".format(name, node, self.binding_path, prop))
            return True

        if prop_type == "int":
            return prop.to_num()

        if prop_type == "array":
            return prop.to_nums()

        if prop_type == "uint8-array":
            return prop.to_bytes()

        if prop_type == "string":
            return prop.to_string()

        if prop_type == "string-array":
            return prop.to_strings()

        if prop_type == "phandle":
            # Map the pointed-to dtlib node to its edtlib Node.
            return self.edt._node2enode[prop.to_node()]

        if prop_type == "phandles":
            return [self.edt._node2enode[node] for node in prop.to_nodes()]

        if prop_type == "phandle-array":
            # This type is a bit high-level for dtlib as it involves
            # information from bindings and *-names properties, so there's no
            # to_phandle_array() in dtlib. Do the type check ourselves.
            if prop.type not in (Type.PHANDLE, Type.PHANDLES, Type.PHANDLES_AND_NUMS):
                _err(f"expected property '{name}' in {node.path} in "
                     f"{node.dt.filename} to be assigned "
                     f"with '{name} = < &foo ... &bar 1 ... &baz 2 3 >' "
                     f"(a mix of phandles and numbers), not '{prop}'")

            return self._standard_phandle_val_list(prop, specifier_space)

        if prop_type == "path":
            return self.edt._node2enode[prop.to_path()]

        # prop_type == "compound". Checking that the 'type:'
        # value is valid is done in _check_prop_by_type().
        #
        # 'compound' is a dummy type for properties that don't fit any of the
        # patterns above, so that we can require all entries in 'properties:'
        # to have a 'type: ...'. No Property object is created for it.
        return None
1647
1648    def _check_undeclared_props(self) -> None:
1649        # Checks that all properties are declared in the binding
1650
1651        for prop_name in self._node.props:
1652            # Allow a few special properties to not be declared in the binding
1653            if prop_name.endswith("-controller") or \
1654               prop_name.startswith("#") or \
1655               prop_name in {
1656                   "compatible", "status", "ranges", "phandle",
1657                   "interrupt-parent", "interrupts-extended", "device_type"}:
1658                continue
1659
1660            if TYPE_CHECKING:
1661                assert self._binding
1662
1663            if prop_name not in self._binding.prop2specs:
1664                _err(f"'{prop_name}' appears in {self._node.path} in "
1665                     f"{self.edt.dts_path}, but is not declared in "
1666                     f"'properties:' in {self.binding_path}")
1667
    def _init_ranges(self) -> None:
        # Initializes self.ranges: a list of Range objects parsed from the
        # node's 'ranges' property, which describes address translations
        # between this node's child bus and its parent bus.
        node = self._node

        self.ranges = []

        if "ranges" not in node.props:
            return

        # Child bus cell counts come from '#address-cells'/'#size-cells' on
        # this node; the parent bus address cell count comes from the parent
        # node (via _address_cells()).
        raw_child_address_cells = node.props.get("#address-cells")
        parent_address_cells = _address_cells(node)
        if raw_child_address_cells is None:
            child_address_cells = 2 # Default value per DT spec.
        else:
            child_address_cells = raw_child_address_cells.to_num()
        raw_child_size_cells = node.props.get("#size-cells")
        if raw_child_size_cells is None:
            child_size_cells = 1 # Default value per DT spec.
        else:
            child_size_cells = raw_child_size_cells.to_num()

        # Number of cells for one translation 3-tuple in 'ranges'
        entry_cells = child_address_cells + parent_address_cells + child_size_cells

        if entry_cells == 0:
            # With all cell counts zero, only an empty 'ranges' (the
            # identity mapping) is consistent.
            if len(node.props["ranges"].value) == 0:
                return
            else:
                _err(f"'ranges' should be empty in {self._node.path} since "
                     f"<#address-cells> = {child_address_cells}, "
                     f"<#address-cells for parent> = {parent_address_cells} and "
                     f"<#size-cells> = {child_size_cells}")

        # Each cell is 4 bytes, so one entry is 4*entry_cells bytes.
        # _slice() checks that the property length is an exact multiple and
        # yields one raw entry at a time.
        for raw_range in _slice(node, "ranges", 4*entry_cells,
                                f"4*(<#address-cells> (= {child_address_cells}) + "
                                "<#address-cells for parent> "
                                f"(= {parent_address_cells}) + "
                                f"<#size-cells> (= {child_size_cells}))"):

            # A zero cell count means the corresponding field is absent;
            # represent that as None in the Range.
            child_bus_cells = child_address_cells
            if child_address_cells == 0:
                child_bus_addr = None
            else:
                child_bus_addr = to_num(raw_range[:4*child_address_cells])
            parent_bus_cells = parent_address_cells
            if parent_address_cells == 0:
                parent_bus_addr = None
            else:
                parent_bus_addr = to_num(
                    raw_range[(4*child_address_cells):
                              (4*child_address_cells + 4*parent_address_cells)])
            length_cells = child_size_cells
            if child_size_cells == 0:
                length = None
            else:
                length = to_num(
                    raw_range[(4*child_address_cells + 4*parent_address_cells):])

            self.ranges.append(Range(self, child_bus_cells, child_bus_addr,
                                     parent_bus_cells, parent_bus_addr,
                                     length_cells, length))
1729
1730    def _init_regs(self) -> None:
1731        # Initializes self.regs
1732
1733        node = self._node
1734
1735        self.regs = []
1736
1737        if "reg" not in node.props:
1738            return
1739
1740        address_cells = _address_cells(node)
1741        size_cells = _size_cells(node)
1742
1743        for raw_reg in _slice(node, "reg", 4*(address_cells + size_cells),
1744                              f"4*(<#address-cells> (= {address_cells}) + "
1745                              f"<#size-cells> (= {size_cells}))"):
1746            if address_cells == 0:
1747                addr = None
1748            else:
1749                addr = _translate(to_num(raw_reg[:4*address_cells]), node)
1750            if size_cells == 0:
1751                size = None
1752            else:
1753                size = to_num(raw_reg[4*address_cells:])
1754            # Size zero is ok for PCI devices
1755            if size_cells != 0 and size == 0 and not self.is_pci_device:
1756                _err(f"zero-sized 'reg' in {self._node!r} seems meaningless "
1757                     "(maybe you want a size of one or #size-cells = 0 "
1758                     "instead)")
1759
1760            # We'll fix up the name when we're done.
1761            self.regs.append(Register(self, None, addr, size))
1762
1763        _add_names(node, "reg", self.regs)
1764
1765    def _init_pinctrls(self) -> None:
1766        # Initializes self.pinctrls from any pinctrl-<index> properties
1767
1768        node = self._node
1769
1770        # pinctrl-<index> properties
1771        pinctrl_props = [prop for name, prop in node.props.items()
1772                         if re.match("pinctrl-[0-9]+", name)]
1773        # Sort by index
1774        pinctrl_props.sort(key=lambda prop: prop.name)
1775
1776        # Check indices
1777        for i, prop in enumerate(pinctrl_props):
1778            if prop.name != "pinctrl-" + str(i):
1779                _err(f"missing 'pinctrl-{i}' property on {node!r} "
1780                     "- indices should be contiguous and start from zero")
1781
1782        self.pinctrls = []
1783        for prop in pinctrl_props:
1784            # We'll fix up the names below.
1785            self.pinctrls.append(PinCtrl(
1786                node=self,
1787                name=None,
1788                conf_nodes=[self.edt._node2enode[node]
1789                            for node in prop.to_nodes()]))
1790
1791        _add_names(node, "pinctrl", self.pinctrls)
1792
1793    def _init_interrupts(self) -> None:
1794        # Initializes self.interrupts
1795
1796        node = self._node
1797
1798        self.interrupts = []
1799
1800        for controller_node, data in _interrupts(node):
1801            # We'll fix up the names below.
1802            controller = self.edt._node2enode[controller_node]
1803            self.interrupts.append(ControllerAndData(
1804                node=self, controller=controller,
1805                data=self._named_cells(controller, data, "interrupt"),
1806                name=None, basename=None))
1807
1808        _add_names(node, "interrupt", self.interrupts)
1809
1810    def _standard_phandle_val_list(
1811            self,
1812            prop: dtlib_Property,
1813            specifier_space: Optional[str]
1814    ) -> List[Optional[ControllerAndData]]:
1815        # Parses a property like
1816        #
1817        #     <prop.name> = <phandle cell phandle cell ...>;
1818        #
1819        # where each phandle points to a controller node that has a
1820        #
1821        #     #<specifier_space>-cells = <size>;
1822        #
1823        # property that gives the number of cells in the value after the
1824        # controller's phandle in the property.
1825        #
1826        # E.g. with a property like
1827        #
1828        #     pwms = <&foo 1 2 &bar 3>;
1829        #
1830        # If 'specifier_space' is "pwm", then we should have this elsewhere
1831        # in the tree:
1832        #
1833        #     foo: ... {
1834        #             #pwm-cells = <2>;
1835        #     };
1836        #
1837        #     bar: ... {
1838        #             #pwm-cells = <1>;
1839        #     };
1840        #
1841        # These values can be given names using the <specifier_space>-names:
1842        # list in the binding for the phandle nodes.
1843        #
1844        # Also parses any
1845        #
1846        #     <specifier_space>-names = "...", "...", ...
1847        #
1848        # Returns a list of Optional[ControllerAndData] instances.
1849        #
1850        # An index is None if the underlying phandle-array element is
1851        # unspecified.
1852
1853        if not specifier_space:
1854            if prop.name.endswith("gpios"):
1855                # There's some slight special-casing for *-gpios properties in that
1856                # e.g. foo-gpios still maps to #gpio-cells rather than
1857                # #foo-gpio-cells
1858                specifier_space = "gpio"
1859            else:
1860                # Strip -s. We've already checked that property names end in -s
1861                # if there is no specifier space in _check_prop_by_type().
1862                specifier_space = prop.name[:-1]
1863
1864        res: List[Optional[ControllerAndData]] = []
1865
1866        for item in _phandle_val_list(prop, specifier_space):
1867            if item is None:
1868                res.append(None)
1869                continue
1870
1871            controller_node, data = item
1872            mapped_controller, mapped_data = \
1873                _map_phandle_array_entry(prop.node, controller_node, data,
1874                                         specifier_space)
1875
1876            controller = self.edt._node2enode[mapped_controller]
1877            # We'll fix up the names below.
1878            res.append(ControllerAndData(
1879                node=self, controller=controller,
1880                data=self._named_cells(controller, mapped_data,
1881                                       specifier_space),
1882                name=None, basename=specifier_space))
1883
1884        _add_names(self._node, specifier_space, res)
1885
1886        return res
1887
1888    def _named_cells(
1889            self,
1890            controller: 'Node',
1891            data: bytes,
1892            basename: str
1893    ) -> Dict[str, int]:
1894        # Returns a dictionary that maps <basename>-cells names given in the
1895        # binding for 'controller' to cell values. 'data' is the raw data, as a
1896        # byte array.
1897
1898        if not controller._binding:
1899            _err(f"{basename} controller {controller._node!r} "
1900                 f"for {self._node!r} lacks binding")
1901
1902        if basename in controller._binding.specifier2cells:
1903            cell_names: List[str] = controller._binding.specifier2cells[basename]
1904        else:
1905            # Treat no *-cells in the binding the same as an empty *-cells, so
1906            # that bindings don't have to have e.g. an empty 'clock-cells:' for
1907            # '#clock-cells = <0>'.
1908            cell_names = []
1909
1910        data_list = to_nums(data)
1911        if len(data_list) != len(cell_names):
1912            _err(f"unexpected '{basename}-cells:' length in binding for "
1913                 f"{controller._node!r} - {len(cell_names)} "
1914                 f"instead of {len(data_list)}")
1915
1916        return dict(zip(cell_names, data_list))
1917
1918
1919class EDT:
1920    """
1921    Represents a devicetree augmented with information from bindings.
1922
1923    These attributes are available on EDT objects:
1924
1925    nodes:
1926      A list of Node objects for the nodes that appear in the devicetree
1927
1928    compat2nodes:
1929      A collections.defaultdict that maps each 'compatible' string that appears
1930      on some Node to a list of Nodes with that compatible.
1931
1932    compat2okay:
1933      Like compat2nodes, but just for nodes with status 'okay'.
1934
1935    compat2vendor:
1936      A collections.defaultdict that maps each 'compatible' string that appears
1937      on some Node to a vendor name parsed from vendor_prefixes.
1938
1939    compat2model:
1940      A collections.defaultdict that maps each 'compatible' string that appears
1941      on some Node to a model name parsed from that compatible.
1942
1943    label2node:
1944      A dict that maps a node label to the node with that label.
1945
1946    dep_ord2node:
1947      A dict that maps an ordinal to the node with that dependency ordinal.
1948
1949    chosen_nodes:
1950      A dict that maps the properties defined on the devicetree's /chosen
1951      node to their values. 'chosen' is indexed by property name (a string),
1952      and values are converted to Node objects. Note that properties of the
1953      /chosen node which can't be converted to a Node are not included in
1954      the value.
1955
1956    dts_path:
1957      The .dts path passed to __init__()
1958
1959    dts_source:
1960      The final DTS source code of the loaded devicetree after merging nodes
1961      and processing /delete-node/ and /delete-property/, as a string
1962
1963    bindings_dirs:
1964      The bindings directory paths passed to __init__()
1965
1966    scc_order:
1967      A list of lists of Nodes. All elements of each list
1968      depend on each other, and the Nodes in any list do not depend
1969      on any Node in a subsequent list. Each list defines a Strongly
1970      Connected Component (SCC) of the graph.
1971
1972      For an acyclic graph each list will be a singleton. Cycles
1973      will be represented by lists with multiple nodes. Cycles are
1974      not expected to be present in devicetree graphs.
1975
1976    The standard library's pickle module can be used to marshal and
1977    unmarshal EDT objects.
1978    """
1979
    def __init__(self,
                 dts: Optional[str],
                 bindings_dirs: List[str],
                 warn_reg_unit_address_mismatch: bool = True,
                 default_prop_types: bool = True,
                 support_fixed_partitions_on_any_bus: bool = True,
                 infer_binding_for_paths: Optional[Iterable[str]] = None,
                 vendor_prefixes: Optional[Dict[str, str]] = None,
                 werror: bool = False):
        """EDT constructor.

        dts:
          Path to devicetree .dts file. Passing None for this value
          is only for internal use; do not do that outside of edtlib.

        bindings_dirs:
          List of paths to directories containing bindings, in YAML format.
          These directories are recursively searched for .yaml files.

        warn_reg_unit_address_mismatch (default: True):
          If True, a warning is logged if a node has a 'reg' property where
          the address of the first entry does not match the unit address of the
          node

        default_prop_types (default: True):
          If True, default property types will be used when a node has no
          bindings.

        support_fixed_partitions_on_any_bus (default True):
          If True, set the Node.bus for 'fixed-partitions' compatible nodes
          to None.  This allows 'fixed-partitions' binding to match regardless
          of the bus the 'fixed-partition' is under.

        infer_binding_for_paths (default: None):
          An iterable of devicetree paths identifying nodes for which bindings
          should be inferred from the node content.  (Child nodes are not
          processed.)  Pass None if no nodes should support inferred bindings.

        vendor_prefixes (default: None):
          A dict mapping vendor prefixes in compatible properties to their
          descriptions. If given, compatibles in the form "manufacturer,device"
          for which "manufacturer" is neither a key in the dict nor a specially
          exempt set of grandfathered-in cases will cause warnings.

        werror (default: False):
          If True, some edtlib specific warnings become errors. This currently
          errors out if 'dts' has any deprecated properties set, or an unknown
          vendor prefix is used.
        """
        # All instance attributes should be initialized here.
        # This makes it easy to keep track of them, which makes
        # implementing __deepcopy__() easier.
        # If you change this, make sure to update __deepcopy__() too,
        # and update the tests for that method.

        # Public attributes (the rest are properties)
        self.nodes: List[Node] = []
        self.compat2nodes: Dict[str, List[Node]] = defaultdict(list)
        self.compat2okay: Dict[str, List[Node]] = defaultdict(list)
        self.compat2vendor: Dict[str, str] = defaultdict(str)
        self.compat2model: Dict[str, str]  = defaultdict(str)
        self.label2node: Dict[str, Node] = {}
        self.dep_ord2node: Dict[int, Node] = {}
        # 'type: ignore' because dts may be None here (deepcopy case);
        # __deepcopy__() fills in the real path afterwards.
        self.dts_path: str = dts # type: ignore
        self.bindings_dirs: List[str] = list(bindings_dirs)

        # Saved kwarg values for internal use
        self._warn_reg_unit_address_mismatch: bool = warn_reg_unit_address_mismatch
        self._default_prop_types: bool = default_prop_types
        self._fixed_partitions_no_bus: bool = support_fixed_partitions_on_any_bus
        self._infer_binding_for_paths: Set[str] = set(infer_binding_for_paths or [])
        self._vendor_prefixes: Dict[str, str] = vendor_prefixes or {}
        self._werror: bool = bool(werror)

        # Other internal state
        self._compat2binding: Dict[Tuple[str, Optional[str]], Binding] = {}
        self._graph: Graph = Graph()
        self._binding_paths: List[str] = _binding_paths(self.bindings_dirs)
        self._binding_fname2path: Dict[str, str] = {
            os.path.basename(path): path
            for path in self._binding_paths
        }
        self._node2enode: Dict[dtlib_Node, Node] = {}

        # dts is None only when called internally (from __deepcopy__()),
        # which deep-copies an existing DT and calls _finish_init() itself.
        if dts is not None:
            try:
                self._dt = DT(dts)
            except DTError as e:
                raise EDTError(e) from e
            self._finish_init()
2070
2071    def _finish_init(self) -> None:
2072        # This helper exists to make the __deepcopy__() implementation
2073        # easier to keep in sync with __init__().
2074        _check_dt(self._dt)
2075
2076        self._init_compat2binding()
2077        self._init_nodes()
2078        self._init_graph()
2079        self._init_luts()
2080
2081        self._check()
2082
2083    def get_node(self, path: str) -> Node:
2084        """
2085        Returns the Node at the DT path or alias 'path'. Raises EDTError if the
2086        path or alias doesn't exist.
2087        """
2088        try:
2089            return self._node2enode[self._dt.get_node(path)]
2090        except DTError as e:
2091            _err(e)
2092
2093    @property
2094    def chosen_nodes(self) -> Dict[str, Node]:
2095        ret: Dict[str, Node] = {}
2096
2097        try:
2098            chosen = self._dt.get_node("/chosen")
2099        except DTError:
2100            return ret
2101
2102        for name, prop in chosen.props.items():
2103            try:
2104                node = prop.to_path()
2105            except DTError:
2106                # DTS value is not phandle or string, or path doesn't exist
2107                continue
2108
2109            ret[name] = self._node2enode[node]
2110
2111        return ret
2112
2113    def chosen_node(self, name: str) -> Optional[Node]:
2114        """
2115        Returns the Node pointed at by the property named 'name' in /chosen, or
2116        None if the property is missing
2117        """
2118        return self.chosen_nodes.get(name)
2119
2120    @property
2121    def dts_source(self) -> str:
2122        return f"{self._dt}"
2123
2124    def __repr__(self) -> str:
2125        return f"<EDT for '{self.dts_path}', binding directories " \
2126            f"'{self.bindings_dirs}'>"
2127
    def __deepcopy__(self, memo) -> 'EDT':
        """
        Implements support for the standard library copy.deepcopy()
        function on EDT instances.
        """

        # Re-run the constructor with the same kwargs that were saved in
        # __init__(). Passing dts=None skips loading a devicetree; the
        # existing parsed tree is deep-copied in below instead.
        ret = EDT(
            None,
            self.bindings_dirs,
            warn_reg_unit_address_mismatch=self._warn_reg_unit_address_mismatch,
            default_prop_types=self._default_prop_types,
            support_fixed_partitions_on_any_bus=self._fixed_partitions_no_bus,
            infer_binding_for_paths=set(self._infer_binding_for_paths),
            vendor_prefixes=dict(self._vendor_prefixes),
            werror=self._werror
        )
        ret.dts_path = self.dts_path
        # Deep-copy the parsed devicetree, then rebuild all derived state
        # (bindings, nodes, dependency graph, LUTs) from it.
        ret._dt = deepcopy(self._dt, memo)
        ret._finish_init()
        return ret
2148
2149    @property
2150    def scc_order(self) -> List[List[Node]]:
2151        try:
2152            return self._graph.scc_order()
2153        except Exception as e:
2154            raise EDTError(e)
2155
2156    def _process_properties_r(self, root_node, props_node):
2157        """
2158        Process props_node properties for dependencies, and add those as
2159        dependencies of root_node. Then walk through all the props_node
2160        children and do the same recursively, maintaining the same root_node.
2161
2162        This ensures that on a node with child nodes, the parent node includes
2163        the dependencies of all the child nodes as well as its own.
2164        """
2165        # A Node depends on any Nodes present in 'phandle',
2166        # 'phandles', or 'phandle-array' property values.
2167        for prop in props_node.props.values():
2168            if prop.type == 'phandle':
2169                self._graph.add_edge(root_node, prop.val)
2170            elif prop.type == 'phandles':
2171                if TYPE_CHECKING:
2172                    assert isinstance(prop.val, list)
2173                for phandle_node in prop.val:
2174                    self._graph.add_edge(root_node, phandle_node)
2175            elif prop.type == 'phandle-array':
2176                if TYPE_CHECKING:
2177                    assert isinstance(prop.val, list)
2178                for cd in prop.val:
2179                    if cd is None:
2180                        continue
2181                    if TYPE_CHECKING:
2182                        assert isinstance(cd, ControllerAndData)
2183                    self._graph.add_edge(root_node, cd.controller)
2184
2185        # A Node depends on whatever supports the interrupts it
2186        # generates.
2187        for intr in props_node.interrupts:
2188            self._graph.add_edge(root_node, intr.controller)
2189
2190        # If the binding defines child bindings, link the child properties to
2191        # the root_node as well.
2192        if props_node._binding and props_node._binding.child_binding:
2193            for child in props_node.children.values():
2194                if "compatible" in child.props:
2195                    # Not a child node, normal node on a different binding.
2196                    continue
2197                self._process_properties_r(root_node, child)
2198
2199    def _process_properties(self, node):
2200        """
2201        Add node dependencies based on own as well as child node properties,
2202        start from the node itself.
2203        """
2204        self._process_properties_r(node, node)
2205
2206    def _init_graph(self) -> None:
2207        # Constructs a graph of dependencies between Node instances,
2208        # which is usable for computing a partial order over the dependencies.
2209        # The algorithm supports detecting dependency loops.
2210        #
2211        # Actually computing the SCC order is lazily deferred to the
2212        # first time the scc_order property is read.
2213
2214        for node in self.nodes:
2215            # Always insert root node
2216            if not node.parent:
2217                self._graph.add_node(node)
2218
2219            # A Node always depends on its parent.
2220            for child in node.children.values():
2221                self._graph.add_edge(child, node)
2222
2223            self._process_properties(node)
2224
2225    def _init_compat2binding(self) -> None:
2226        # Creates self._compat2binding, a dictionary that maps
2227        # (<compatible>, <bus>) tuples (both strings) to Binding objects.
2228        #
2229        # The Binding objects are created from YAML files discovered
2230        # in self.bindings_dirs as needed.
2231        #
2232        # For example, self._compat2binding["company,dev", "can"]
2233        # contains the Binding for the 'company,dev' device, when it
2234        # appears on the CAN bus.
2235        #
2236        # For bindings that don't specify a bus, <bus> is None, so that e.g.
2237        # self._compat2binding["company,notonbus", None] is the Binding.
2238        #
2239        # Only bindings for 'compatible' strings that appear in the devicetree
2240        # are loaded.
2241
2242        dt_compats = _dt_compats(self._dt)
2243        # Searches for any 'compatible' string mentioned in the devicetree
2244        # files, with a regex
2245        dt_compats_search = re.compile(
2246            "|".join(re.escape(compat) for compat in dt_compats)
2247        ).search
2248
2249        for binding_path in self._binding_paths:
2250            with open(binding_path, encoding="utf-8") as f:
2251                contents = f.read()
2252
2253            # As an optimization, skip parsing files that don't contain any of
2254            # the .dts 'compatible' strings, which should be reasonably safe
2255            if not dt_compats_search(contents):
2256                continue
2257
2258            # Load the binding and check that it actually matches one of the
2259            # compatibles. Might get false positives above due to comments and
2260            # stuff.
2261
2262            try:
2263                # Parsed PyYAML output (Python lists/dictionaries/strings/etc.,
2264                # representing the file)
2265                raw = yaml.load(contents, Loader=_BindingLoader)
2266            except yaml.YAMLError as e:
2267                _err(
2268                        f"'{binding_path}' appears in binding directories "
2269                        f"but isn't valid YAML: {e}")
2270                continue
2271
2272            # Convert the raw data to a Binding object, erroring out
2273            # if necessary.
2274            binding = self._binding(raw, binding_path, dt_compats)
2275
2276            # Register the binding in self._compat2binding, along with
2277            # any child bindings that have their own compatibles.
2278            while binding is not None:
2279                if binding.compatible:
2280                    self._register_binding(binding)
2281                binding = binding.child_binding
2282
2283    def _binding(self,
2284                 raw: Optional[dict],
2285                 binding_path: str,
2286                 dt_compats: Set[str]) -> Optional[Binding]:
2287        # Convert a 'raw' binding from YAML to a Binding object and return it.
2288        #
2289        # Error out if the raw data looks like an invalid binding.
2290        #
2291        # Return None if the file doesn't contain a binding or the
2292        # binding's compatible isn't in dt_compats.
2293
2294        # Get the 'compatible:' string.
2295        if raw is None or "compatible" not in raw:
2296            # Empty file, binding fragment, spurious file, etc.
2297            return None
2298
2299        compatible = raw["compatible"]
2300
2301        if compatible not in dt_compats:
2302            # Not a compatible we care about.
2303            return None
2304
2305        # Initialize and return the Binding object.
2306        return Binding(binding_path, self._binding_fname2path, raw=raw)
2307
2308    def _register_binding(self, binding: Binding) -> None:
2309        # Do not allow two different bindings to have the same
2310        # 'compatible:'/'on-bus:' combo
2311        if TYPE_CHECKING:
2312            assert binding.compatible
2313        old_binding = self._compat2binding.get((binding.compatible,
2314                                                binding.on_bus))
2315        if old_binding:
2316            msg = (f"both {old_binding.path} and {binding.path} have "
2317                   f"'compatible: {binding.compatible}'")
2318            if binding.on_bus is not None:
2319                msg += f" and 'on-bus: {binding.on_bus}'"
2320            _err(msg)
2321
2322        # Register the binding.
2323        self._compat2binding[binding.compatible, binding.on_bus] = binding
2324
    def _init_nodes(self) -> None:
        # Creates a list of edtlib.Node objects from the dtlib.Node objects, in
        # self.nodes

        # Phase 1: create all Node objects and their per-node state that
        # doesn't reference other Nodes.
        for dt_node in self._dt.node_iter():
            # Warning: We depend on parent Nodes being created before their
            # children. This is guaranteed by node_iter().
            if "compatible" in dt_node.props:
                compats = dt_node.props["compatible"].to_strings()
            else:
                compats = []
            node = Node(dt_node, self, compats)
            # The binding must be resolved before regs/ranges are parsed.
            node.bus_node = node._bus_node(self._fixed_partitions_no_bus)
            node._init_binding()
            node._init_regs()
            node._init_ranges()

            self.nodes.append(node)
            self._node2enode[dt_node] = node

        # Phase 2: initialize state that may reference other Nodes.
        for node in self.nodes:
            # These depend on all Node objects having been created, because
            # they (either always or sometimes) reference other nodes, so we
            # run them separately
            node._init_props(default_prop_types=self._default_prop_types,
                             err_on_deprecated=self._werror)
            node._init_interrupts()
            node._init_pinctrls()

        if self._warn_reg_unit_address_mismatch:
            # This warning matches the simple_bus_reg warning in dtc
            for node in self.nodes:
                # Address mismatch is ok for PCI devices
                if (node.regs and node.regs[0].addr != node.unit_addr and
                        not node.is_pci_device):
                    _LOG.warning("unit address and first address in 'reg' "
                                 f"(0x{node.regs[0].addr:x}) don't match for "
                                 f"{node.path}")
2363
    def _init_luts(self) -> None:
        # Initialize node lookup tables (LUTs): label2node, compat2nodes,
        # compat2okay, compat2vendor, compat2model, and dep_ord2node.

        for node in self.nodes:
            for label in node.labels:
                self.label2node[label] = node

            for compat in node.compats:
                self.compat2nodes[compat].append(node)

                if node.status == "okay":
                    self.compat2okay[compat].append(node)

                # Each compatible only needs to be validated and classified
                # once; skip it if an earlier node already did so.
                if compat in self.compat2vendor:
                    continue

                # The regular expression comes from dt-schema.
                compat_re = r'^[a-zA-Z][a-zA-Z0-9,+\-._]+$'
                if not re.match(compat_re, compat):
                    _err(f"node '{node.path}' compatible '{compat}' "
                         'must match this regular expression: '
                         f"'{compat_re}'")

                if ',' in compat and self._vendor_prefixes:
                    vendor, model = compat.split(',', 1)
                    if vendor in self._vendor_prefixes:
                        self.compat2vendor[compat] = self._vendor_prefixes[vendor]
                        self.compat2model[compat] = model

                    # As an exception, the root node can have whatever
                    # compatibles it wants. Other nodes get checked.
                    elif node.path != '/':
                        # Unknown vendor prefix: error under werror,
                        # otherwise just warn.
                        if self._werror:
                            handler_fn: Any = _err
                        else:
                            handler_fn = _LOG.warning
                        handler_fn(
                            f"node '{node.path}' compatible '{compat}' "
                            f"has unknown vendor prefix '{vendor}'")


        # Map each dependency ordinal to a representative node of its SCC
        # (for an acyclic graph, every SCC is a singleton).
        for nodeset in self.scc_order:
            node = nodeset[0]
            self.dep_ord2node[node.dep_ordinal] = node
2408
2409    def _check(self) -> None:
2410        # Tree-wide checks and warnings.
2411
2412        for binding in self._compat2binding.values():
2413            for spec in binding.prop2specs.values():
2414                if not spec.enum or spec.type != 'string':
2415                    continue
2416
2417                if not spec.enum_tokenizable:
2418                    _LOG.warning(
2419                        f"compatible '{binding.compatible}' "
2420                        f"in binding '{binding.path}' has non-tokenizable enum "
2421                        f"for property '{spec.name}': " +
2422                        ', '.join(repr(x) for x in spec.enum))
2423                elif not spec.enum_upper_tokenizable:
2424                    _LOG.warning(
2425                        f"compatible '{binding.compatible}' "
2426                        f"in binding '{binding.path}' has enum for property "
2427                        f"'{spec.name}' that is only tokenizable "
2428                        'in lowercase: ' +
2429                        ', '.join(repr(x) for x in spec.enum))
2430
2431        # Validate the contents of compatible properties.
2432        for node in self.nodes:
2433            if 'compatible' not in node.props:
2434                continue
2435
2436            compatibles = node.props['compatible'].val
2437
2438            # _check() runs after _init_compat2binding() has called
2439            # _dt_compats(), which already converted every compatible
2440            # property to a list of strings. So we know 'compatibles'
2441            # is a list, but add an assert for future-proofing.
2442            assert isinstance(compatibles, list)
2443
2444            for compat in compatibles:
2445                # This is also just for future-proofing.
2446                assert isinstance(compat, str)
2447
2448
def bindings_from_paths(yaml_paths: List[str],
                        ignore_errors: bool = False) -> List[Binding]:
    """
    Get a list of Binding objects from the yaml files 'yaml_paths'.

    If 'ignore_errors' is True, YAML files that cause an EDTError when
    loaded are ignored. (No other exception types are silenced.)
    """
    # Bindings may refer to each other by file name
    fname2path = {os.path.basename(p): p for p in yaml_paths}

    bindings = []
    for path in yaml_paths:
        try:
            bindings.append(Binding(path, fname2path))
        except EDTError:
            if not ignore_errors:
                raise

    return bindings
2469
2470
class EDTError(Exception):
    """Exception raised for devicetree- and binding-related errors"""
2473
2474#
2475# Public global functions
2476#
2477
2478
def load_vendor_prefixes_txt(vendor_prefixes: str) -> Dict[str, str]:
    """Load a vendor-prefixes.txt file and return a dict
    representation mapping a vendor prefix to the vendor name.

    Raises EDTError on a malformed (tab-less) non-comment line. An
    'assert' was previously used here, but asserts are stripped when
    Python runs with -O, silently accepting bad input.
    """
    vnd2vendor: Dict[str, str] = {}
    with open(vendor_prefixes, 'r', encoding='utf-8') as f:
        for line in f:
            line = line.strip()

            if not line or line.startswith('#'):
                # Comment or empty line.
                continue

            # Other lines should be in this form:
            #
            # <vnd><TAB><vendor>
            vnd, tab, vendor = line.partition('\t')
            if not tab:
                raise EDTError(
                    f"{vendor_prefixes}: expected '<vnd><TAB><vendor>' "
                    f"line, got: {line!r}")
            vnd2vendor[vnd] = vendor
    return vnd2vendor
2499
2500#
2501# Private global functions
2502#
2503
2504
def _dt_compats(dt: DT) -> Set[str]:
    # Returns a set() with all 'compatible' strings in the devicetree
    # represented by dt (a dtlib.DT instance)

    compats: Set[str] = set()
    for node in dt.node_iter():
        if "compatible" in node.props:
            compats.update(node.props["compatible"].to_strings())
    return compats
2513
2514
2515def _binding_paths(bindings_dirs: List[str]) -> List[str]:
2516    # Returns a list with the paths to all bindings (.yaml files) in
2517    # 'bindings_dirs'
2518
2519    binding_paths = []
2520
2521    for bindings_dir in bindings_dirs:
2522        for root, _, filenames in os.walk(bindings_dir):
2523            for filename in filenames:
2524                if filename.endswith(".yaml") or filename.endswith(".yml"):
2525                    binding_paths.append(os.path.join(root, filename))
2526
2527    return binding_paths
2528
2529
def _binding_inc_error(msg):
    # Raises a YAML constructor error to report a problem in the legacy
    # !include implementation; 'msg' is appended to the "error: " prefix

    raise yaml.constructor.ConstructorError(None, None, f"error: {msg}")
2534
2535
2536def _check_include_dict(name: Optional[str],
2537                        allowlist: Optional[List[str]],
2538                        blocklist: Optional[List[str]],
2539                        child_filter: Optional[dict],
2540                        binding_path: Optional[str]) -> None:
2541    # Check that an 'include:' named 'name' with property-allowlist
2542    # 'allowlist', property-blocklist 'blocklist', and
2543    # child-binding filter 'child_filter' has valid structure.
2544
2545    if name is None:
2546        _err(f"'include:' element in {binding_path} "
2547             "should have a 'name' key")
2548
2549    if allowlist is not None and blocklist is not None:
2550        _err(f"'include:' of file '{name}' in {binding_path} "
2551             "should not specify both 'property-allowlist:' "
2552             "and 'property-blocklist:'")
2553
2554    while child_filter is not None:
2555        child_copy = deepcopy(child_filter)
2556        child_allowlist: Optional[List[str]] = \
2557            child_copy.pop('property-allowlist', None)
2558        child_blocklist: Optional[List[str]] = \
2559            child_copy.pop('property-blocklist', None)
2560        next_child_filter: Optional[dict] = \
2561            child_copy.pop('child-binding', None)
2562
2563        if child_copy:
2564            # We've popped out all the valid keys.
2565            _err(f"'include:' of file '{name}' in {binding_path} "
2566                 "should not have these unexpected contents in a "
2567                 f"'child-binding': {child_copy}")
2568
2569        if child_allowlist is not None and child_blocklist is not None:
2570            _err(f"'include:' of file '{name}' in {binding_path} "
2571                 "should not specify both 'property-allowlist:' and "
2572                 "'property-blocklist:' in a 'child-binding:'")
2573
2574        child_filter = next_child_filter
2575
2576
def _filter_properties(raw: dict,
                       allowlist: Optional[List[str]],
                       blocklist: Optional[List[str]],
                       child_filter: Optional[dict],
                       binding_path: Optional[str]) -> None:
    # Destructively prunes 'raw["properties"]' and the 'properties' of
    # each nested 'raw["child-binding"]' level, if they exist, according
    # to 'allowlist', 'blocklist', and 'child_filter'.

    _filter_properties_helper(raw.get('properties'), allowlist, blocklist,
                              binding_path)

    # Descend the binding's 'child-binding' chain and the filter's
    # 'child-binding' chain in lockstep, stopping when either ends
    filter_level = child_filter
    binding_level = raw.get('child-binding')
    while filter_level is not None and binding_level is not None:
        _filter_properties_helper(binding_level.get('properties'),
                                  filter_level.get('property-allowlist'),
                                  filter_level.get('property-blocklist'),
                                  binding_path)
        filter_level = filter_level.get('child-binding')
        binding_level = binding_level.get('child-binding')
2597
2598
2599def _filter_properties_helper(props: Optional[dict],
2600                              allowlist: Optional[List[str]],
2601                              blocklist: Optional[List[str]],
2602                              binding_path: Optional[str]) -> None:
2603    if props is None or (allowlist is None and blocklist is None):
2604        return
2605
2606    _check_prop_filter('property-allowlist', allowlist, binding_path)
2607    _check_prop_filter('property-blocklist', blocklist, binding_path)
2608
2609    if allowlist is not None:
2610        allowset = set(allowlist)
2611        to_del = [prop for prop in props if prop not in allowset]
2612    else:
2613        if TYPE_CHECKING:
2614            assert blocklist
2615        blockset = set(blocklist)
2616        to_del = [prop for prop in props if prop in blockset]
2617
2618    for prop in to_del:
2619        del props[prop]
2620
2621
2622def _check_prop_filter(name: str, value: Optional[List[str]],
2623                       binding_path: Optional[str]) -> None:
2624    # Ensure an include: ... property-allowlist or property-blocklist
2625    # is a list.
2626
2627    if value is None:
2628        return
2629
2630    if not isinstance(value, list):
2631        _err(f"'{name}' value {value} in {binding_path} should be a list")
2632
2633
2634def _merge_props(to_dict: dict,
2635                 from_dict: dict,
2636                 parent: Optional[str],
2637                 binding_path: Optional[str],
2638                 check_required: bool = False):
2639    # Recursively merges 'from_dict' into 'to_dict', to implement 'include:'.
2640    #
2641    # If 'from_dict' and 'to_dict' contain a 'required:' key for the same
2642    # property, then the values are ORed together.
2643    #
2644    # If 'check_required' is True, then an error is raised if 'from_dict' has
2645    # 'required: true' while 'to_dict' has 'required: false'. This prevents
2646    # bindings from "downgrading" requirements from bindings they include,
2647    # which might help keep bindings well-organized.
2648    #
2649    # It's an error for most other keys to appear in both 'from_dict' and
2650    # 'to_dict'. When it's not an error, the value in 'to_dict' takes
2651    # precedence.
2652    #
2653    # 'parent' is the name of the parent key containing 'to_dict' and
2654    # 'from_dict', and 'binding_path' is the path to the top-level binding.
2655    # These are used to generate errors for sketchy property overwrites.
2656
2657    for prop in from_dict:
2658        if isinstance(to_dict.get(prop), dict) and \
2659           isinstance(from_dict[prop], dict):
2660            _merge_props(to_dict[prop], from_dict[prop], prop, binding_path,
2661                         check_required)
2662        elif prop not in to_dict:
2663            to_dict[prop] = from_dict[prop]
2664        elif _bad_overwrite(to_dict, from_dict, prop, check_required):
2665            _err(f"{binding_path} (in '{parent}'): '{prop}' "
2666                 f"from included file overwritten ('{from_dict[prop]}' "
2667                 f"replaced with '{to_dict[prop]}')")
2668        elif prop == "required":
2669            # Need a separate check here, because this code runs before
2670            # Binding._check()
2671            if not (isinstance(from_dict["required"], bool) and
2672                    isinstance(to_dict["required"], bool)):
2673                _err(f"malformed 'required:' setting for '{parent}' in "
2674                     f"'properties' in {binding_path}, expected true/false")
2675
2676            # 'required: true' takes precedence
2677            to_dict["required"] = to_dict["required"] or from_dict["required"]
2678
2679
2680def _bad_overwrite(to_dict: dict, from_dict: dict, prop: str,
2681                   check_required: bool) -> bool:
2682    # _merge_props() helper. Returns True in cases where it's bad that
2683    # to_dict[prop] takes precedence over from_dict[prop].
2684
2685    if to_dict[prop] == from_dict[prop]:
2686        return False
2687
2688    # These are overridden deliberately
2689    if prop in {"title", "description", "compatible"}:
2690        return False
2691
2692    if prop == "required":
2693        if not check_required:
2694            return False
2695        return from_dict[prop] and not to_dict[prop]
2696
2697    return True
2698
2699
def _binding_include(loader, node):
    # Implements !include, for backwards compatibility. '!include [foo, bar]'
    # just becomes [foo, bar].

    if isinstance(node, yaml.ScalarNode):
        # !include foo.yaml
        return [loader.construct_scalar(node)]

    if isinstance(node, yaml.SequenceNode):
        # !include [foo.yaml, bar.yaml]
        return loader.construct_sequence(node)

    # Neither a scalar nor a sequence: report and bail out
    _binding_inc_error("unrecognised node type in !include statement")
2713
2714
2715def _check_prop_by_type(prop_name: str,
2716                        options: dict,
2717                        binding_path: Optional[str]) -> None:
2718    # Binding._check_properties() helper. Checks 'type:', 'default:',
2719    # 'const:' and # 'specifier-space:' for the property named 'prop_name'
2720
2721    prop_type = options.get("type")
2722    default = options.get("default")
2723    const = options.get("const")
2724
2725    if prop_type is None:
2726        _err(f"missing 'type:' for '{prop_name}' in 'properties' in "
2727             f"{binding_path}")
2728
2729    ok_types = {"boolean", "int", "array", "uint8-array", "string",
2730                "string-array", "phandle", "phandles", "phandle-array",
2731                "path", "compound"}
2732
2733    if prop_type not in ok_types:
2734        _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2735             f"has unknown type '{prop_type}', expected one of " +
2736             ", ".join(ok_types))
2737
2738    if "specifier-space" in options and prop_type != "phandle-array":
2739        _err(f"'specifier-space' in 'properties: {prop_name}' "
2740             f"has type '{prop_type}', expected 'phandle-array'")
2741
2742    if prop_type == "phandle-array":
2743        if not prop_name.endswith("s") and not "specifier-space" in options:
2744            _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2745                 f"has type 'phandle-array' and its name does not end in 's', "
2746                 f"but no 'specifier-space' was provided.")
2747
2748    # If you change const_types, be sure to update the type annotation
2749    # for PropertySpec.const.
2750    const_types = {"int", "array", "uint8-array", "string", "string-array"}
2751    if const and prop_type not in const_types:
2752        _err(f"const in {binding_path} for property '{prop_name}' "
2753             f"has type '{prop_type}', expected one of " +
2754             ", ".join(const_types))
2755
2756    # Check default
2757
2758    if default is None:
2759        return
2760
2761    if prop_type in {"boolean", "compound", "phandle", "phandles",
2762                     "phandle-array", "path"}:
2763        _err("'default:' can't be combined with "
2764             f"'type: {prop_type}' for '{prop_name}' in "
2765             f"'properties:' in {binding_path}")
2766
2767    def ok_default() -> bool:
2768        # Returns True if 'default' is an okay default for the property's type.
2769        # If you change this, be sure to update the type annotation for
2770        # PropertySpec.default.
2771
2772        if prop_type == "int" and isinstance(default, int) or \
2773           prop_type == "string" and isinstance(default, str):
2774            return True
2775
2776        # array, uint8-array, or string-array
2777
2778        if not isinstance(default, list):
2779            return False
2780
2781        if prop_type == "array" and \
2782           all(isinstance(val, int) for val in default):
2783            return True
2784
2785        if prop_type == "uint8-array" and \
2786           all(isinstance(val, int) and 0 <= val <= 255 for val in default):
2787            return True
2788
2789        # string-array
2790        return all(isinstance(val, str) for val in default)
2791
2792    if not ok_default():
2793        _err(f"'default: {default}' is invalid for '{prop_name}' "
2794             f"in 'properties:' in {binding_path}, "
2795             f"which has type {prop_type}")
2796
2797
def _translate(addr: int, node: dtlib_Node) -> int:
    # Recursively translates 'addr' on 'node' to the address space(s) of its
    # parent(s), by looking at 'ranges' properties. Returns the translated
    # address.
    #
    # addr:
    #   Address within 'node's address space, as an int
    #
    # node:
    #   The dtlib.Node whose 'ranges'-bearing ancestors drive the translation

    if not node.parent or "ranges" not in node.parent.props:
        # No translation
        return addr

    if not node.parent.props["ranges"].value:
        # DT spec.: "If the property is defined with an <empty> value, it
        # specifies that the parent and child address space is identical, and
        # no address translation is required."
        #
        # Treat this the same as a 'range' that explicitly does a one-to-one
        # mapping, as opposed to there not being any translation.
        return _translate(addr, node.parent)

    # Gives the size of each component in a translation 3-tuple in 'ranges'
    child_address_cells = _address_cells(node)
    parent_address_cells = _address_cells(node.parent)
    child_size_cells = _size_cells(node)

    # Number of cells for one translation 3-tuple in 'ranges'
    entry_cells = child_address_cells + parent_address_cells + child_size_cells

    for raw_range in _slice(node.parent, "ranges", 4*entry_cells,
                            f"4*(<#address-cells> (= {child_address_cells}) + "
                            "<#address-cells for parent> "
                            f"(= {parent_address_cells}) + "
                            f"<#size-cells> (= {child_size_cells}))"):
        # Each entry is <child-addr parent-addr length>. Peel the fields
        # off the front of the raw bytes in turn (one cell = 4 bytes); the
        # remaining tail after the first two fields is the length.
        child_addr = to_num(raw_range[:4*child_address_cells])
        raw_range = raw_range[4*child_address_cells:]

        parent_addr = to_num(raw_range[:4*parent_address_cells])
        raw_range = raw_range[4*parent_address_cells:]

        child_len = to_num(raw_range)

        if child_addr <= addr < child_addr + child_len:
            # 'addr' is within range of a translation in 'ranges'. Recursively
            # translate it and return the result.
            return _translate(parent_addr + addr - child_addr, node.parent)

    # 'addr' is not within range of any translation in 'ranges'
    return addr
2844
2845
def _add_names(node: dtlib_Node, names_ident: str, objs: Any) -> None:
    # Sets the .name field on each element of 'objs' from the node's
    # '<names_ident>-names' property (e.g. 'reg-names' when names_ident is
    # "reg"). If the property is missing, every name is set to None.
    # None elements in 'objs' are left untouched.

    prop_name = names_ident + "-names"

    if prop_name not in node.props:
        for obj in objs:
            if obj is not None:
                obj.name = None
        return

    names = node.props[prop_name].to_strings()
    if len(names) != len(objs):
        # Must be a one-to-one correspondence between names and objects
        _err(f"{prop_name} property in {node.path} "
             f"in {node.dt.filename} has {len(names)} strings, "
             f"expected {len(objs)} strings")

    for obj, name in zip(objs, names):
        if obj is not None:
            obj.name = name
2875
2876
def _interrupt_parent(start_node: dtlib_Node) -> dtlib_Node:
    # Returns the node pointed at by the closest 'interrupt-parent' property,
    # searching upwards from 'start_node'. As of writing, this behavior isn't
    # specified in the DT spec., but seems to match what some .dts files
    # expect.

    current: Optional[dtlib_Node] = start_node

    while current:
        prop = current.props.get("interrupt-parent")
        if prop is not None:
            return prop.to_node()
        current = current.parent

    _err(f"{start_node!r} has an 'interrupts' property, but neither the node "
         f"nor any of its parents has an 'interrupt-parent' property")
2891
2892
def _interrupts(node: dtlib_Node) -> List[Tuple[dtlib_Node, bytes]]:
    # Returns a list of (<controller>, <data>) tuples, with one tuple per
    # interrupt generated by 'node'. <controller> is the destination of the
    # interrupt (possibly after mapping through an 'interrupt-map'), and <data>
    # the data associated with the interrupt (as a 'bytes' object).

    # 'interrupts-extended' takes precedence over 'interrupts' if both
    # are present
    if "interrupts-extended" in node.props:
        prop = node.props["interrupts-extended"]

        result: List[Tuple[dtlib_Node, bytes]] = []
        for entry in _phandle_val_list(prop, "interrupt"):
            if entry is None:
                _err(f"node '{node.path}' interrupts-extended property "
                     "has an empty element")
            iparent, spec = entry
            result.append(_map_interrupt(node, iparent, spec))
        return result

    if "interrupts" not in node.props:
        return []

    # Treat 'interrupts' as a special case of 'interrupts-extended', with
    # the same interrupt parent for all interrupts
    iparent = _interrupt_parent(node)
    interrupt_cells = _interrupt_cells(iparent)

    return [_map_interrupt(node, iparent, raw)
            for raw in _slice(node, "interrupts", 4*interrupt_cells,
                              "4*<#interrupt-cells>")]
2924
2925
def _map_interrupt(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> Tuple[dtlib_Node, bytes]:
    # Translates an interrupt headed from 'child' to 'parent' with data
    # 'child_spec' through any 'interrupt-map' properties. Returns a
    # (<controller>, <data>) tuple with the final destination after mapping.

    if "interrupt-controller" in parent.props:
        # 'parent' is itself the controller: no mapping needed
        return (parent, child_spec)

    def own_address_cells(node):
        # Used for parents pointed at by 'interrupt-map'. We can't use
        # _address_cells(), because it's the #address-cells property on 'node'
        # itself that matters.

        address_cells = node.props.get("#address-cells")
        if not address_cells:
            _err(f"missing #address-cells on {node!r} "
                 "(while handling interrupt-map)")
        return address_cells.to_num()

    def spec_len_fn(node):
        # Can't use _address_cells() here, because it's the #address-cells
        # property on 'node' itself that matters
        return own_address_cells(node) + _interrupt_cells(node)

    # 'interrupt-map' rows are keyed on <unit address> + <interrupt
    # specifier>, so the child's raw unit address (from 'reg') is prepended
    # to 'child_spec' before mapping
    parent, raw_spec = _map(
        "interrupt", child, parent, _raw_unit_addr(child) + child_spec,
        spec_len_fn, require_controller=True)

    # Strip the parent unit address part, if any
    return (parent, raw_spec[4*own_address_cells(parent):])
2960
2961
def _map_phandle_array_entry(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        basename: str
) -> Tuple[dtlib_Node, bytes]:
    # Returns a (<controller>, <data>) tuple with the final destination after
    # mapping through any '<basename>-map' (e.g. gpio-map) properties. See
    # _map_interrupt().

    def parent_spec_cells(node):
        # Parent specifier length comes from the parent's #<basename>-cells
        cells_prop_name = f"#{basename}-cells"
        if cells_prop_name not in node.props:
            _err(f"expected '{cells_prop_name}' property on {node!r} "
                 f"(referenced by {child!r})")
        return node.props[cells_prop_name].to_num()

    # Do not require <prefix>-controller for anything but interrupts for now
    return _map(basename, child, parent, child_spec, parent_spec_cells,
                require_controller=False)
2982
2983
def _map(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        spec_len_fn: Callable[[dtlib_Node], int],
        require_controller: bool
) -> Tuple[dtlib_Node, bytes]:
    # Common code for mapping through <prefix>-map properties, e.g.
    # interrupt-map and gpio-map.
    #
    # prefix:
    #   The prefix, e.g. "interrupt" or "gpio"
    #
    # child:
    #   The "sender", e.g. the node with 'interrupts = <...>'
    #
    # parent:
    #   The "receiver", e.g. a node with 'interrupt-map = <...>' or
    #   'interrupt-controller' (no mapping)
    #
    # child_spec:
    #   The data associated with the interrupt/GPIO/etc., as a 'bytes' object,
    #   e.g. <1 2> for 'foo-gpios = <&gpio1 1 2>'.
    #
    # spec_len_fn:
    #   Function called on a parent specified in a *-map property to get the
    #   length of the parent specifier (data after phandle in *-map), in cells
    #
    # require_controller:
    #   If True, the final controller node after mapping is required to have
    #   a <prefix>-controller property.

    map_prop = parent.props.get(prefix + "-map")
    if not map_prop:
        if require_controller and prefix + "-controller" not in parent.props:
            _err(f"expected '{prefix}-controller' property on {parent!r} "
                 f"(referenced by {child!r})")

        # No mapping
        return (parent, child_spec)

    # The child specifier is ANDed with the optional <prefix>-map-mask
    # before being compared against each row's child entry below
    masked_child_spec = _mask(prefix, child, parent, child_spec)

    # Walk the raw map bytes row by row. Each row is
    # <child specifier> <parent phandle> <parent specifier>.
    raw = map_prop.value
    while raw:
        if len(raw) < len(child_spec):
            _err(f"bad value for {map_prop!r}, missing/truncated child data")
        child_spec_entry = raw[:len(child_spec)]
        raw = raw[len(child_spec):]

        if len(raw) < 4:
            _err(f"bad value for {map_prop!r}, missing/truncated phandle")
        phandle = to_num(raw[:4])
        raw = raw[4:]

        # Parent specified in *-map
        map_parent = parent.dt.phandle2node.get(phandle)
        if not map_parent:
            _err(f"bad phandle ({phandle}) in {map_prop!r}")

        # The parent specifier length varies per map parent, so it must be
        # looked up (via spec_len_fn) before the row can be consumed
        map_parent_spec_len = 4*spec_len_fn(map_parent)
        if len(raw) < map_parent_spec_len:
            _err(f"bad value for {map_prop!r}, missing/truncated parent data")
        parent_spec = raw[:map_parent_spec_len]
        raw = raw[map_parent_spec_len:]

        # Got one *-map row. Check if it matches the child data.
        if child_spec_entry == masked_child_spec:
            # Handle *-map-pass-thru
            parent_spec = _pass_thru(
                prefix, child, parent, child_spec, parent_spec)

            # Found match. Recursively map and return it.
            return _map(prefix, parent, map_parent, parent_spec, spec_len_fn,
                        require_controller)

    _err(f"child specifier for {child!r} ({child_spec!r}) "
         f"does not appear in {map_prop!r}")
3063
3064
def _mask(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> bytes:
    # Applies the <prefix>-map-mask property of 'parent' (if any) to
    # 'child_spec' and returns the result. See _map() for the parameters.

    mask_prop = parent.props.get(prefix + "-map-mask")
    if mask_prop is None:
        # No mask: the specifier is used as-is
        return child_spec

    mask = mask_prop.value
    if len(mask) != len(child_spec):
        _err(f"{child!r}: expected '{prefix}-mask' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(mask)} bytes")

    return _and(child_spec, mask)
3085
3086
def _pass_thru(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        parent_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-pass-thru properties, e.g.
    # interrupt-map-pass-thru: bits set in the pass-thru mask are taken
    # from 'child_spec'; the remaining bits come from 'parent_spec'.
    #
    # parent_spec:
    #   The parent data from the matched entry in the <prefix>-map property
    #
    # See _map() for the other parameters.

    pass_thru_prop = parent.props.get(prefix + "-map-pass-thru")
    if pass_thru_prop is None:
        # No pass-thru: parent data is used unchanged
        return parent_spec

    pass_thru = pass_thru_prop.value
    if len(pass_thru) != len(child_spec):
        _err(f"{child!r}: expected '{prefix}-map-pass-thru' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(pass_thru)} bytes")

    combined = _or(_and(child_spec, pass_thru),
                   _and(parent_spec, _not(pass_thru)))

    # Truncate to length of parent spec.
    return combined[-len(parent_spec):]
3117
3118
def _raw_unit_addr(node: dtlib_Node) -> bytes:
    # _map_interrupt() helper. Returns the unit address (derived from 'reg'
    # and #address-cells) as a raw 'bytes'

    if 'reg' not in node.props:
        _err(f"{node!r} lacks 'reg' property "
             "(needed for 'interrupt-map' unit address lookup)")

    # The unit address is the first <#address-cells> cells of 'reg'
    addr_len = 4*_address_cells(node)
    reg_value = node.props['reg'].value

    if len(reg_value) < addr_len:
        _err(f"{node!r} has too short 'reg' property "
             "(while doing 'interrupt-map' unit address lookup)")

    return reg_value[:addr_len]
3134
3135
3136def _and(b1: bytes, b2: bytes) -> bytes:
3137    # Returns the bitwise AND of the two 'bytes' objects b1 and b2. Pads
3138    # with ones on the left if the lengths are not equal.
3139
3140    # Pad on the left, to equal length
3141    maxlen = max(len(b1), len(b2))
3142    return bytes(x & y for x, y in zip(b1.rjust(maxlen, b'\xff'),
3143                                       b2.rjust(maxlen, b'\xff')))
3144
3145
3146def _or(b1: bytes, b2: bytes) -> bytes:
3147    # Returns the bitwise OR of the two 'bytes' objects b1 and b2. Pads with
3148    # zeros on the left if the lengths are not equal.
3149
3150    # Pad on the left, to equal length
3151    maxlen = max(len(b1), len(b2))
3152    return bytes(x | y for x, y in zip(b1.rjust(maxlen, b'\x00'),
3153                                       b2.rjust(maxlen, b'\x00')))
3154
3155
3156def _not(b: bytes) -> bytes:
3157    # Returns the bitwise not of the 'bytes' object 'b'
3158
3159    # ANDing with 0xFF avoids negative numbers
3160    return bytes(~x & 0xFF for x in b)
3161
3162
def _phandle_val_list(
        prop: dtlib_Property,
        n_cells_name: str
) -> List[Optional[Tuple[dtlib_Node, bytes]]]:
    # Parses a '<phandle> <value> <phandle> <value> ...' value. The number of
    # cells that make up each <value> is derived from the node pointed at by
    # the preceding <phandle>.
    #
    # prop:
    #   dtlib.Property with value to parse
    #
    # n_cells_name:
    #   The <name> part of the #<name>-cells property to look for on the nodes
    #   the phandles point to, e.g. "gpio" for #gpio-cells.
    #
    # Each tuple in the return value is a (<node>, <value>) pair, where <node>
    # is the node pointed at by <phandle>. If <phandle> does not refer
    # to a node, the entire list element is None.

    cells_prop_name = f"#{n_cells_name}-cells"

    entries: List[Optional[Tuple[dtlib_Node, bytes]]] = []

    data = prop.value
    while data:
        if len(data) < 4:
            # Truncated phandle at the end of the value
            _err("bad value for " + repr(prop))

        # Each phandle is a single 32-bit (4-byte) cell
        node = prop.node.dt.phandle2node.get(to_num(data[:4]))
        data = data[4:]

        if not node:
            # Unspecified phandle-array element. This is valid; a 0
            # phandle value followed by no cells is an empty element.
            entries.append(None)
            continue

        if cells_prop_name not in node.props:
            _err(f"{node!r} lacks {cells_prop_name}")

        # The target node tells us how many cells its <value> spans
        value_len = 4*node.props[cells_prop_name].to_num()
        if len(data) < value_len:
            _err("missing data after phandle in " + repr(prop))

        entries.append((node, data[:value_len]))
        data = data[value_len:]

    return entries
3212
3213
def _address_cells(node: dtlib_Node) -> int:
    # Returns the #address-cells setting for 'node', giving the number of <u32>
    # cells used to encode the address in the 'reg' property.
    # The setting lives on the *parent* node, per the devicetree spec.
    if TYPE_CHECKING:
        assert node.parent

    cells_prop = node.parent.props.get("#address-cells")
    if cells_prop is None:
        return 2  # Default value per DT spec.
    return cells_prop.to_num()
3223
3224
def _size_cells(node: dtlib_Node) -> int:
    # Returns the #size-cells setting for 'node', giving the number of <u32>
    # cells used to encode the size in the 'reg' property.
    # The setting lives on the *parent* node, per the devicetree spec.
    if TYPE_CHECKING:
        assert node.parent

    cells_prop = node.parent.props.get("#size-cells")
    if cells_prop is None:
        return 1  # Default value per DT spec.
    return cells_prop.to_num()
3234
3235
def _interrupt_cells(node: dtlib_Node) -> int:
    # Returns the #interrupt-cells property value on 'node', erroring out if
    # 'node' has no #interrupt-cells property.
    # Unlike #address-cells/#size-cells, this property has no default and is
    # looked up on 'node' itself, not its parent.

    cells_prop = node.props.get("#interrupt-cells")
    if cells_prop is None:
        _err(f"{node!r} lacks #interrupt-cells")
    return cells_prop.to_num()
3243
3244
def _slice(node: dtlib_Node,
           prop_name: str,
           size: int,
           size_hint: str) -> List[bytes]:
    # Thin wrapper around _slice_helper() that reports failures as EDTError
    # (the edtlib exception type) rather than dtlib's.
    # NOTE(review): _slice_helper() presumably splits the value of 'node's
    # property 'prop_name' into 'size'-byte chunks, with 'size_hint' naming
    # where 'size' came from for error messages -- confirm against dtlib.
    return _slice_helper(node, prop_name, size, size_hint, EDTError)
3250
3251
def _check_dt(dt: DT) -> None:
    # Does devicetree sanity checks. dtlib is meant to be general and
    # anything-goes except for very special properties like phandle, but in
    # edtlib we can be pickier.
    #
    # Raises EDTError (via _err()) on the first problem found.

    # Check that 'status' has one of the values given in the devicetree spec.

    # Accept "ok" for backwards compatibility
    ok_status = {"ok", "okay", "disabled", "reserved", "fail", "fail-sss"}

    for node in dt.node_iter():
        if "status" in node.props:
            try:
                status_val = node.props["status"].to_string()
            except DTError as e:
                # The error message gives the path
                _err(str(e))

            if status_val not in ok_status:
                # Sort the allowed values so the error message is
                # deterministic (iterating a set directly would list them
                # in hash-randomized order, varying between runs)
                _err(f"unknown 'status' value \"{status_val}\" in {node.path} "
                     f"in {node.dt.filename}, expected one of " +
                     ", ".join(sorted(ok_status)) +
                     " (see the devicetree specification)")

        # 'ranges' must be either empty or a list of numbers
        ranges_prop = node.props.get("ranges")
        if ranges_prop:
            if ranges_prop.type not in (Type.EMPTY, Type.NUMS):
                _err(f"expected 'ranges = < ... >;' in {node.path} in "
                     f"{node.dt.filename}, not '{ranges_prop}' "
                     "(see the devicetree specification)")
3282
3283
def _err(msg: str) -> NoReturn:
    # Common error-out helper: raises EDTError with message 'msg'.
    # Marked NoReturn so callers/type-checkers know execution stops here.
    raise EDTError(msg)
3286
# Module-level logging object, named after this module per logging convention
_LOG = logging.getLogger(__name__)

# Regular expression for non-alphanumeric-or-underscore characters.
# With re.ASCII, \W matches exactly the characters outside [a-zA-Z0-9_],
# so non-ASCII letters also count as "special" (see str_as_token()).
_NOT_ALPHANUM_OR_UNDERSCORE = re.compile(r'\W', re.ASCII)
3292
3293
def str_as_token(val: str) -> str:
    """Return a canonical representation of a string as a C token.

    This converts special characters in 'val' to underscores, and
    returns the result."""

    # With re.ASCII, \W matches every character outside [a-zA-Z0-9_],
    # so each such character becomes a single underscore
    return re.sub(r'\W', '_', val, flags=re.ASCII)
3301
3302
# Custom PyYAML binding loader class to avoid modifying yaml.Loader directly,
# which could interfere with YAML loading in clients (add_constructor() below
# mutates the class it is called on)
class _BindingLoader(Loader):
    pass


# Add legacy '!include foo.yaml' handling: the '!include' YAML tag is
# dispatched to _binding_include()
_BindingLoader.add_constructor("!include", _binding_include)
3311
3312#
3313# "Default" binding for properties which are defined by the spec.
3314#
3315# Zephyr: do not change the _DEFAULT_PROP_TYPES keys without
3316# updating the documentation for the DT_PROP() macro in
3317# include/devicetree.h.
3318#
3319
3320_DEFAULT_PROP_TYPES: Dict[str, str] = {
3321    "compatible": "string-array",
3322    "status": "string",
3323    "ranges": "compound",  # NUMS or EMPTY
3324    "reg": "array",
3325    "reg-names": "string-array",
3326    "label": "string",
3327    "interrupts": "array",
3328    "interrupts-extended": "compound",
3329    "interrupt-names": "string-array",
3330    "interrupt-controller": "boolean",
3331}
3332
3333_STATUS_ENUM: List[str] = "ok okay disabled reserved fail fail-sss".split()
3334
def _raw_default_property_for(
        name: str
) -> Dict[str, Union[str, bool, List[str]]]:
    # Returns the raw binding fragment for the spec-defined default property
    # 'name': its type comes from _DEFAULT_PROP_TYPES, it is never required,
    # and 'status' additionally gets its set of legal values as an enum.
    raw_prop: Dict[str, Union[str, bool, List[str]]] = {
        'type': _DEFAULT_PROP_TYPES[name],
        'required': False,
    }
    if name == 'status':
        raw_prop['enum'] = _STATUS_ENUM
    return raw_prop
3345
# Binding object backing the default property specs below. It is constructed
# straight from a raw dict rather than a YAML file, and covers every property
# in _DEFAULT_PROP_TYPES.
# NOTE(review): the leading 'None, {}' positional arguments are presumably
# the binding path and filename-to-path map -- confirm against
# Binding.__init__.
_DEFAULT_PROP_BINDING: Binding = Binding(
    None, {},
    raw={
        'properties': {
            name: _raw_default_property_for(name)
            for name in _DEFAULT_PROP_TYPES
        },
    },
    require_compatible=False, require_description=False,
)
3356
# One PropertySpec per spec-defined default property, all backed by
# _DEFAULT_PROP_BINDING above
_DEFAULT_PROP_SPECS: Dict[str, PropertySpec] = {
    name: PropertySpec(name, _DEFAULT_PROP_BINDING)
    for name in _DEFAULT_PROP_TYPES
}
3361