# Copyright (c) 2020 Nordic Semiconductor ASA
# SPDX-License-Identifier: Apache-2.0

"""
Like gen_kconfig_rest.py, but for generating an index of existing
devicetree bindings.
"""

import argparse
import glob
import io
import logging
import os
import pprint
import re
import sys
import textwrap
from collections import defaultdict
from pathlib import Path

import gen_helpers
from devicetree import edtlib

ZEPHYR_BASE = Path(__file__).parents[2]

GENERIC_OR_VENDOR_INDEPENDENT = 'Generic or vendor-independent'
UNKNOWN_VENDOR = 'Unknown vendor'

# Base properties that have documentation in 'dt-important-props'.
DETAILS_IN_IMPORTANT_PROPS = set('compatible label reg status interrupts'.split())

logger = logging.getLogger('gen_devicetree_rest')

class VndLookup:
    """
    A convenience class for looking up information based on a
    devicetree compatible's vendor prefix 'vnd'.
    """

    def __init__(self, vendor_prefixes, bindings):
        self.vnd2vendor = self.load_vnd2vendor(vendor_prefixes)
        self.vnd2bindings = self.init_vnd2bindings(bindings)
        self.vnd2ref_target = self.init_vnd2ref_target()

    def vendor(self, vnd):
        return self.vnd2vendor.get(vnd, UNKNOWN_VENDOR)

    def bindings(self, vnd, default=None):
        return self.vnd2bindings.get(vnd, default)

    def target(self, vnd):
        return self.vnd2ref_target.get(
            vnd, self.vnd2ref_target[(UNKNOWN_VENDOR,)])

    @staticmethod
    def load_vnd2vendor(vendor_prefixes):
        # Load the vendor-prefixes.txt file. Return a dict mapping 'vnd'
        # vendor prefixes as they are found in compatible properties to
        # each vendor's full name.
        #
        # For example, this line:
        #
        #    vnd	A stand-in for a real vendor
        #
        # Gets split into a key 'vnd' and a value 'A stand-in for a real
        # vendor' in the return value.
        #
        # The 'None' key maps to GENERIC_OR_VENDOR_INDEPENDENT.
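        #
        # Illustrative sketch only (the 'nordic' entry is a hypothetical
        # example of a real prefix), the return value looks roughly like:
        #
        #    {None: 'Generic or vendor-independent',
        #     'nordic': 'Nordic Semiconductor',
        #     'vnd': 'A stand-in for a real vendor'}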

        vnd2vendor = {
            None: GENERIC_OR_VENDOR_INDEPENDENT,
        }
        vnd2vendor.update(edtlib.load_vendor_prefixes_txt(vendor_prefixes))

        logger.info('found %d vendor prefixes in %s', len(vnd2vendor) - 1,
                    vendor_prefixes)
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2vendor=%s', pprint.pformat(vnd2vendor))

        return vnd2vendor

    def init_vnd2bindings(self, bindings):
        # Take a 'vnd2vendor' map and a list of bindings and return a dict
        # mapping 'vnd' vendor prefixes to lists of bindings. The
        # bindings in each list are sorted by compatible. The keys in the
        # return value are sorted by vendor name.
        #
        # Special cases:
        #
        # - The 'None' key maps to bindings with no vendor prefix
        #   in their compatibles, like 'gpio-keys'. This is the first key.
        # - The (UNKNOWN_VENDOR,) key maps to bindings whose compatible
        #   has a vendor prefix that exists, but is not known,
        #   like 'somethingrandom,device'. This is the last key.
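        #
        # Rough shape of the return value (illustrative only; the vendor
        # prefixes and compatibles shown here are hypothetical):
        #
        #    {None: [<binding for 'gpio-keys'>, ...],
        #     'nordic': [<binding for 'nordic,nrf-uarte'>, ...],
        #     ...,
        #     (UNKNOWN_VENDOR,): [<binding for 'somethingrandom,device'>]}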

        # Get an unsorted dict mapping vendor prefixes to lists of bindings.
        unsorted = defaultdict(list)
        generic_bindings = []
        unknown_vendor_bindings = []
        for binding in bindings:
            vnd = compatible_vnd(binding.compatible)
            if vnd is None:
                generic_bindings.append(binding)
            elif vnd in self.vnd2vendor:
                unsorted[vnd].append(binding)
            else:
                unknown_vendor_bindings.append(binding)

        # Key functions for sorting.
        def vnd_key(vnd):
            return self.vnd2vendor[vnd].casefold()

        def binding_key(binding):
            return binding.compatible

        # Sort the bindings for each vendor by compatible.
        # Plain dicts preserve insertion order in CPython 3.6+, which is
        # what we support, so the return dict's keys end up in the order
        # we insert them: generic bindings first, then known vendors
        # sorted by vendor name, then unknown vendors last.
        #
        # The unknown-vendor bindings being inserted as a 1-tuple key is a
        # hack for convenience that ensures they won't collide with a
        # known vendor. The code that consumes the dict below handles
        # this.
        vnd2bindings = {
            None: sorted(generic_bindings, key=binding_key)
        }
        for vnd in sorted(unsorted, key=vnd_key):
            vnd2bindings[vnd] = sorted(unsorted[vnd], key=binding_key)
        vnd2bindings[(UNKNOWN_VENDOR,)] = sorted(unknown_vendor_bindings,
                                                 key=binding_key)

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2bindings: %s', pprint.pformat(vnd2bindings))

        return vnd2bindings

    def init_vnd2ref_target(self):
        # The return value, vnd2ref_target, is a dict mapping vendor
        # prefixes to ref targets for their relevant sections in this
        # file, with these special cases:
        #
        # - The None key maps to the ref target for bindings with no
        #   vendor prefix in their compatibles, like 'gpio-keys'
        # - The (UNKNOWN_VENDOR,) key maps to the ref target for bindings
        #   whose compatible has a vendor prefix that is not recognized.
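        #
        # Illustrative shape only (the 'nordic' key is a hypothetical
        # example):
        #
        #    {None: 'dt_no_vendor',
        #     'nordic': 'dt_vendor_nordic',
        #     (UNKNOWN_VENDOR,): 'dt_unknown_vendor'}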
        vnd2ref_target = {}

        for vnd in self.vnd2bindings:
            if vnd is None:
                vnd2ref_target[vnd] = 'dt_no_vendor'
            elif isinstance(vnd, str):
                vnd2ref_target[vnd] = f'dt_vendor_{vnd}'
            else:
                assert vnd == (UNKNOWN_VENDOR,), vnd
                vnd2ref_target[vnd] = 'dt_unknown_vendor'

        return vnd2ref_target

def main():
    args = parse_args()
    setup_logging(args.verbose)
    bindings = load_bindings(args.dts_roots, args.dts_folders)
    base_binding = load_base_binding()
    driver_sources = load_driver_sources()
    vnd_lookup = VndLookup(args.vendor_prefixes, bindings)
    dump_content(bindings, base_binding, vnd_lookup, driver_sources, args.out_dir,
                 args.turbo_mode)

def parse_args():
    # Parse command line arguments from sys.argv.

    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument('-v', '--verbose', default=0, action='count',
                        help='increase verbosity; may be given multiple times')
    parser.add_argument('--vendor-prefixes', required=True,
                        help='vendor-prefixes.txt file path')
    parser.add_argument('--dts-root', dest='dts_roots', action='append',
                        help='''additional DTS root directory as it would
                        be set in DTS_ROOTS''')
    parser.add_argument('--dts-folder', dest='dts_folders', action='append', default=[],
                        help='additional DTS folders containing binding files')
    parser.add_argument('--turbo-mode', action='store_true',
                        help='Enable turbo mode (dummy references)')
    parser.add_argument('out_dir', help='output files are generated here')

    return parser.parse_args()

def setup_logging(verbose):
    if verbose >= 2:
        log_level = logging.DEBUG
    elif verbose == 1:
        log_level = logging.INFO
    else:
        log_level = logging.ERROR
    logging.basicConfig(format='%(filename)s:%(levelname)s: %(message)s',
                        level=log_level)

def load_bindings(dts_roots, dts_folders):
    # Get a list of edtlib.Binding objects from searching 'dts_roots'
    # and 'dts_folders'.

    if not dts_roots:
        sys.exit('no DTS roots; use --dts-root to specify at least one')

    binding_files = []
    for dts_root in dts_roots:
        binding_files.extend(glob.glob(f'{dts_root}/dts/bindings/**/*.yml',
                                       recursive=True))
        binding_files.extend(glob.glob(f'{dts_root}/dts/bindings/**/*.yaml',
                                       recursive=True))
    for folders in dts_folders:
        binding_files.extend(glob.glob(f'{folders}/*.yml', recursive=False))
        binding_files.extend(glob.glob(f'{folders}/*.yaml', recursive=False))

    bindings = edtlib.bindings_from_paths(binding_files, ignore_errors=True)

    num_total = len(bindings)

    # Remove bindings from the 'vnd' vendor, which is not a real vendor,
    # but rather a stand-in we use for examples and tests when a real
    # vendor would be inappropriate.
    bindings = [binding for binding in bindings if
                compatible_vnd(binding.compatible) != 'vnd']

    logger.info('found %d bindings (ignored %d) under these DTS roots: %s',
                len(bindings), num_total - len(bindings), dts_roots)

    return bindings

def load_base_binding():
    # Make a Binding object for base.yaml.
    #
    # This helps separate presentation for properties common to all
    # nodes from node-specific properties.

    base_yaml = ZEPHYR_BASE / 'dts' / 'bindings' / 'base' / 'base.yaml'
    base_includes = {"pm.yaml": os.fspath(ZEPHYR_BASE / 'dts' / 'bindings' / 'base' / 'pm.yaml')}

    if not base_yaml.is_file():
        sys.exit(f'Expected to find base.yaml at {base_yaml}')
    return edtlib.Binding(os.fspath(base_yaml), base_includes, require_compatible=False,
                          require_description=False)

def load_driver_sources():
    driver_sources = {}
    dt_drv_compat_occurrences = defaultdict(list)

    dt_drv_compat_pattern = re.compile(r"#define DT_DRV_COMPAT\s+(.*)")
    device_dt_inst_define_pattern = re.compile(r"DEVICE_DT_INST_DEFINE")

    folders_to_scan = ["boards", "drivers", "modules", "soc", "subsys"]

    # When looking at folders_to_scan, a file is considered a likely driver source if:
    # - it is the one and only file containing a "#define DT_DRV_COMPAT <compatible>"
    #   for a given compatible, or
    # - it contains both a "#define DT_DRV_COMPAT <compatible>" and a
    #   DEVICE_DT_INST_DEFINE(...) call.
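    #
    # Illustrative example only (hypothetical path and compatible): if
    # drivers/serial/uart_vnd.c is the only file with
    # "#define DT_DRV_COMPAT vnd_uart" and it also calls
    # DEVICE_DT_INST_DEFINE(), then driver_sources["vnd_uart"] ends up
    # mapping to Path("drivers/serial/uart_vnd.c").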

    for folder in folders_to_scan:
        for dirpath, _, filenames in os.walk(ZEPHYR_BASE / folder):
            for filename in filenames:
                if not filename.endswith(('.c', '.h')):
                    continue
                filepath = Path(dirpath) / filename
                with open(filepath, encoding="utf-8") as f:
                    content = f.read()

                relative_path = filepath.relative_to(ZEPHYR_BASE)

                # Find all DT_DRV_COMPAT occurrences in the file
                dt_drv_compat_matches = dt_drv_compat_pattern.findall(content)
                for compatible in dt_drv_compat_matches:
                    dt_drv_compat_occurrences[compatible].append(relative_path)

                if dt_drv_compat_matches and device_dt_inst_define_pattern.search(content):
                    for compatible in dt_drv_compat_matches:
                        if compatible in driver_sources:
                            # Mark as ambiguous if multiple files define the same compatible
                            driver_sources[compatible] = None
                        else:
                            driver_sources[compatible] = relative_path

    # Remove ambiguous driver sources
    driver_sources = {k: v for k, v in driver_sources.items() if v is not None}

    # Consider DT_DRV_COMPATs with only one occurrence as driver sources
    for compatible, occurrences in dt_drv_compat_occurrences.items():
        if compatible not in driver_sources and len(occurrences) == 1:
            path = occurrences[0]
            # Assume the driver is defined in the enclosing folder if it's a header file
            if path.suffix == ".h":
                path = path.parent
            driver_sources[compatible] = path

    return driver_sources

def dump_content(bindings, base_binding, vnd_lookup, driver_sources, out_dir, turbo_mode):
    # Dump the generated .rst files for a vnd2bindings dict.
    # Files are only written if they are changed. Existing .rst
    # files which would not be written by the 'vnd2bindings'
    # dict are removed.

    out_dir = Path(out_dir)

    setup_bindings_dir(bindings, out_dir)
    if turbo_mode:
        write_dummy_index(bindings, out_dir)
    else:
        write_bindings_rst(vnd_lookup, out_dir)
        write_orphans(bindings, base_binding, vnd_lookup, driver_sources, out_dir)

def setup_bindings_dir(bindings, out_dir):
    # Make a set of all the Path objects we will be creating for
    # out_dir / bindings / {binding_path}.rst. Delete all the ones that
    # shouldn't be there. Make sure the bindings output directory
    # exists.

    paths = set()
    bindings_dir = out_dir / 'bindings'
    logger.info('making output subdirectory %s', bindings_dir)
    bindings_dir.mkdir(parents=True, exist_ok=True)

    for binding in bindings:
        paths.add(bindings_dir / binding_filename(binding))

    for dirpath, _, filenames in os.walk(bindings_dir):
        for filename in filenames:
            path = Path(dirpath) / filename
            if path not in paths:
                logger.info('removing unexpected file %s', path)
                path.unlink()


def write_dummy_index(bindings, out_dir):
    # Write out_dir / bindings.rst, with dummy anchors

    # header
    content = '\n'.join((
        '.. _devicetree_binding_index:',
        '.. _dt_vendor_zephyr:',
        '',
        'Dummy bindings index',
        '####################',
        '',
    ))

    # build compatibles set and dump it
    compatibles = {binding.compatible for binding in bindings}
    content += '\n'.join(
        f'.. dtcompatible:: {compatible}' for compatible in compatibles
    )

    write_if_updated(out_dir / 'bindings.rst', content)


def write_bindings_rst(vnd_lookup, out_dir):
    # Write out_dir / bindings.rst, the top level index of bindings.

    string_io = io.StringIO()

    print_block(f'''\
    .. _devicetree_binding_index:

    Bindings index
    ##############

    This page documents the available devicetree bindings.
    See {zref('dt-bindings')} for an introduction to the Zephyr bindings
    file format.

    Vendor index
    ************

    This section contains an index of hardware vendors.
    Click on a vendor's name to go to the list of bindings for
    that vendor.

    .. rst-class:: rst-columns
    ''', string_io)

    for vnd, bindings in vnd_lookup.vnd2bindings.items():
        if len(bindings) == 0:
            continue
        print(f'- :ref:`{vnd_lookup.target(vnd)}`', file=string_io)

    print_block('''\

    Bindings by vendor
    ******************

    This section contains available bindings, grouped by vendor.
    Within each group, bindings are listed by the "compatible" property
    they apply to, like this:

    **Vendor name (vendor prefix)**

    .. rst-class:: rst-columns

    - <compatible-A>
    - <compatible-B> (on <bus-name> bus)
    - <compatible-C>
    - ...

    The text "(on <bus-name> bus)" appears when bindings may behave
    differently depending on the bus the node appears on.
    For example, this applies to some sensor device nodes, which may
    appear as children of either I2C or SPI bus nodes.
    ''', string_io)

    for vnd, bindings in vnd_lookup.vnd2bindings.items():
        if isinstance(vnd, tuple):
            title = vnd[0]
        else:
            title = vnd_lookup.vendor(vnd).strip()
            if isinstance(vnd, str):
                title += f' ({vnd})'
        underline = '=' * len(title)

        if len(bindings) == 0:
            continue

        print_block(f'''\
        .. _{vnd_lookup.target(vnd)}:

        {title}
        {underline}

        .. rst-class:: rst-columns
        ''', string_io)
        for binding in bindings:
            print(f'- :ref:`{binding_ref_target(binding)}`', file=string_io)
        print(file=string_io)

    write_if_updated(out_dir / 'bindings.rst', string_io.getvalue())

def write_orphans(bindings, base_binding, vnd_lookup, driver_sources, out_dir):
    # Write out_dir / bindings / foo / binding_page.rst for each binding
    # in 'bindings', along with any "disambiguation" pages needed when a
    # single compatible string can be handled by multiple bindings.
    #
    # These files are 'orphans' in the Sphinx sense: they are not in
    # any toctree.

    logging.info('updating :orphan: files for %d bindings', len(bindings))
    num_written = 0

    # First, figure out which compatibles map to multiple bindings. We
    # need this information to decide which of the generated files for
    # a compatible are "disambiguation" pages that point to per-bus
    # binding pages, and which ones aren't.

    compat2bindings = defaultdict(list)
    for binding in bindings:
        compat2bindings[binding.compatible].append(binding)
    dup_compat2bindings = {compatible: bindings for compatible, bindings
                           in compat2bindings.items() if len(bindings) > 1}
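    # Illustrative example only (hypothetical compatible): a sensor whose
    # 'vnd,sensor' compatible is described by both an I2C and an SPI
    # binding would show up here as
    # {'vnd,sensor': [<binding on i2c bus>, <binding on spi bus>]}.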

    # Next, write the per-binding pages. These contain the
    # per-compatible targets for compatibles not in 'dup_compat2bindings'.
    # We'll finish up by writing per-compatible "disambiguation" pages
    # for compatibles in 'dup_compat2bindings'.

    # Names of properties in base.yaml.
    base_names = set(base_binding.prop2specs.keys())
    for binding in bindings:
        string_io = io.StringIO()

        print_binding_page(binding, base_names, vnd_lookup,
                           driver_sources, dup_compat2bindings, string_io)

        written = write_if_updated(out_dir / 'bindings' /
                                   binding_filename(binding),
                                   string_io.getvalue())

        if written:
            num_written += 1

    # Generate disambiguation pages for duplicate compatibles.
    compatibles_dir = out_dir / 'compatibles'
    setup_compatibles_dir(dup_compat2bindings.keys(), compatibles_dir)
    for compatible in dup_compat2bindings:
        string_io = io.StringIO()

        print_compatible_disambiguation_page(
            compatible, dup_compat2bindings[compatible], string_io)

        written = write_if_updated(compatibles_dir /
                                   compatible_filename(compatible),
                                   string_io.getvalue())

        if written:
            num_written += 1

    logging.info('done writing :orphan: files; %d files needed updates',
                 num_written)

def print_binding_page(binding, base_names, vnd_lookup, driver_sources, dup_compats,
                       string_io):
    # Print the rst content for 'binding' to 'string_io'. The
    # 'dup_compats' argument should support membership testing for
    # compatibles which have multiple associated bindings; if
    # 'binding.compatible' is not in it, then the ref target for the
    # entire compatible is generated in this page as well.

    # :orphan:
    #
    # .. ref_target:
    #
    # Title [(on <bus> bus)]
    # ######################
    if binding.on_bus:
        on_bus_title = f' (on {binding.on_bus} bus)'
    else:
        on_bus_title = ''
    compatible = binding.compatible

    title = f'{compatible}{on_bus_title}'
    underline = '#' * len(title)
    if compatible not in dup_compats:
        # If this binding is the only one that handles this
        # compatible, point the ".. dtcompatible:" directive straight
        # to this page. There's no need for disambiguation.
        dtcompatible = f'.. dtcompatible:: {binding.compatible}'
    else:
        # This compatible is handled by multiple bindings;
        # its ".. dtcompatible::" should be in a disambiguation page
        # instead.
        dtcompatible = ''

    print_block(f'''\
    :orphan:

    .. raw:: html

        <!--
        FIXME: do not limit page width until content uses another representation
        format other than tables
        -->
        <style>.wy-nav-content {{ max-width: none; !important }}</style>

    {dtcompatible}
    .. _{binding_ref_target(binding)}:

    {title}
    {underline}
    ''', string_io)

    # Vendor: <link-to-vendor-section>
    vnd = compatible_vnd(compatible)
    print('Vendor: '
          f':ref:`{vnd_lookup.vendor(vnd)} <{vnd_lookup.target(vnd)}>`\n',
          file=string_io)

    # Link to driver implementation (if it exists).
    compatible = re.sub("[-,.@/+]", "_", compatible.lower())
    if compatible in driver_sources:
        print_block(
            f"""\
            .. note::

               An implementation of a driver matching this compatible is available in
               :zephyr_file:`{driver_sources[compatible]}`.
        """,
            string_io,
        )

    # Binding description.
    if binding.bus:
        bus_help = f'These nodes are "{binding.bus}" bus nodes.'
    else:
        bus_help = ''
    print_block(f'''\
    Description
    ***********

    {bus_help}
    ''', string_io)
    print(to_code_block(binding.description.strip()), file=string_io)

    # Properties.
    print_block('''\
    Properties
    **********
    ''', string_io)
    print_top_level_properties(binding, base_names, string_io)
    print_child_binding_properties(binding, string_io)

    # Specifier cells.
    #
    # This presentation isn't particularly nice. Perhaps something
    # better can be done for future work.
    if binding.specifier2cells:
        print_block('''\
        Specifier cell names
        ********************
        ''', string_io)
        for specifier, cells in binding.specifier2cells.items():
            print(f'- {specifier} cells: {", ".join(cells)}',
                  file=string_io)

def print_top_level_properties(binding, base_names, string_io):
    # Print the RST for top level properties for 'binding' to 'string_io'.
    #
    # The 'base_names' set contains all the base.yaml properties.

    def prop_table(filter_fn, deprecated):
        # Get a properly formatted and indented table of properties.
        specs = [prop_spec for prop_spec in binding.prop2specs.values()
                 if filter_fn(prop_spec)]
        indent = ' ' * 14
        if specs:
            temp_io = io.StringIO()
            print_property_table(specs, temp_io, deprecated=deprecated)
            return textwrap.indent(temp_io.getvalue(), indent)

        return indent + '(None)'

    def node_props_filter(prop_spec):
        return prop_spec.name not in base_names and not prop_spec.deprecated

    def deprecated_node_props_filter(prop_spec):
        return prop_spec.name not in base_names and prop_spec.deprecated

    def base_props_filter(prop_spec):
        return prop_spec.name in base_names

    if binding.child_binding:
        print_block('''\
        Top level properties
        ====================
        ''', string_io)
    if binding.prop2specs:
        if binding.child_binding:
            print_block(f'''
            These property descriptions apply to "{binding.compatible}"
            nodes themselves. This page also describes child node
            properties in the following sections.
            ''', string_io)


        print_block(f'''\
        .. tabs::

           .. group-tab:: Node specific properties

              Properties not inherited from the base binding file.

{prop_table(node_props_filter, False)}

           .. group-tab:: Deprecated node specific properties

              Deprecated properties not inherited from the base binding file.

{prop_table(deprecated_node_props_filter, False)}

           .. group-tab:: Base properties

              Properties inherited from the base binding file, which defines
              common properties that may be set on many nodes. Not all of these
              may apply to the "{binding.compatible}" compatible.

{prop_table(base_props_filter, True)}

        ''', string_io)
    else:
        print('No top-level properties.\n', file=string_io)

def print_child_binding_properties(binding, string_io):
    # Prints property tables for all levels of nesting of child
    # bindings.

    level = 1
    child = binding.child_binding
    while child is not None:
        if level == 1:
            level_string = 'Child'
        elif level == 2:
            level_string = 'Grandchild'
        else:
            level_string = f'Level {level} child'
        if child.prop2specs:
            title = f'{level_string} node properties'
            underline = '=' * len(title)
            print(f'{title}\n{underline}\n', file=string_io)
            print_property_table(child.prop2specs.values(), string_io,
                                 deprecated=True)
        child = child.child_binding
        level += 1

def print_property_table(prop_specs, string_io, deprecated=False):
    # Writes a table of properties based on 'prop_specs', an iterable
    # of edtlib.PropertySpec objects, to 'string_io'.
    #
    # If 'deprecated' is true and the property is deprecated, an extra
    # line is printed mentioning that fact. We allow this to be turned
    # off for tables where all properties are deprecated, so it's
    # clear from context.

    # Table header.
    print_block('''\
    .. list-table::
       :widths: 1 1 4
       :header-rows: 1

       * - Name
         - Type
         - Details
    ''', string_io)

    def to_prop_table_row(prop_spec):
        # Get a multiline string for a PropertySpec table row.

        # The description column combines the description field,
        # along with things like the default value or enum values.
        #
        # The property 'description' field from the binding may span
        # one or multiple lines. We try to come up with a nice
        # presentation for each.
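        #
        # Illustrative rendering only (the 'clock-frequency' property is a
        # hypothetical example): a required int property with a one-line
        # description would produce roughly this row:
        #
        #    * - ``clock-frequency``
        #      - ``int``
        #      - .. code-block:: none
        #
        #             Clock frequency in Hz
        #
        #        This property is **required**.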
        details = ''
        raw_prop_descr = prop_spec.description
        if raw_prop_descr:
            details += to_code_block(raw_prop_descr)

        if prop_spec.required:
            details += '\n\nThis property is **required**.'

        if prop_spec.default:
            details += f'\n\nDefault value: ``{prop_spec.default}``'

        if prop_spec.const:
            details += f'\n\nConstant value: ``{prop_spec.const}``'
        elif prop_spec.enum:
            details += ('\n\nLegal values: ' +
                        ', '.join(f'``{repr(val)}``' for val in
                                  prop_spec.enum))

        if prop_spec.name in DETAILS_IN_IMPORTANT_PROPS:
            details += (f'\n\nSee {zref("dt-important-props")} for more '
                        'information.')

        if deprecated and prop_spec.deprecated:
            details += '\n\nThis property is **deprecated**.'

        return f"""\
   * - ``{prop_spec.name}``
     - ``{prop_spec.type}``
     - {textwrap.indent(details, ' ' * 7).lstrip()}
"""

    # Print each row.
    for prop_spec in prop_specs:
        print(to_prop_table_row(prop_spec), file=string_io)

def setup_compatibles_dir(compatibles, compatibles_dir):
    # Make a set of all the Path objects we will be creating for
    # out_dir / compatibles / {compatible_path}.rst. Delete all the ones that
    # shouldn't be there. Make sure the compatibles output directory
    # exists.

    logger.info('making output subdirectory %s', compatibles_dir)
    compatibles_dir.mkdir(parents=True, exist_ok=True)

    paths = set(compatibles_dir / compatible_filename(compatible)
                for compatible in compatibles)

    for path in compatibles_dir.iterdir():
        if path not in paths:
            logger.info('removing unexpected file %s', path)
            path.unlink()


def print_compatible_disambiguation_page(compatible, bindings, string_io):
    # Print the disambiguation page for 'compatible', which can be
    # handled by any of the bindings in 'bindings', to 'string_io'.

    assert len(bindings) > 1, (compatible, bindings)

    underline = '#' * len(compatible)
    output_list = '\n    '.join(f'- :ref:`{binding_ref_target(binding)}`'
                                for binding in bindings)

    print_block(f'''\
    :orphan:

    .. dtcompatible:: {compatible}

    {compatible}
    {underline}

    The devicetree compatible ``{compatible}`` may be handled by any
    of the following bindings:

    {output_list}
    ''', string_io)

def print_block(block, string_io):
    # Helper for dedenting and printing a triple-quoted RST block.
    # (Just a block of text, not necessarily just a 'code-block'
    # directive.)

    print(textwrap.dedent(block), file=string_io)

def to_code_block(s, indent=0):
    # Converts 's', a string, to an indented rst .. code-block::. The
    # 'indent' argument is a leading indent for each line in the code
    # block, in spaces.
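    #
    # Illustrative example only: to_code_block('line one\nline two')
    # returns:
    #
    #    .. code-block:: none
    #
    #       line one
    #       line two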
    indent = indent * ' '
    return ('.. code-block:: none\n\n' +
            textwrap.indent(s, indent + '   ') + '\n')

def compatible_vnd(compatible):
    # Get the vendor prefix for a compatible string 'compatible'.
    #
    # For example, compatible_vnd('foo,device') is 'foo'.
    #
    # If 'compatible' has no comma (','), None is returned.

    if ',' not in compatible:
        return None

    return compatible.split(',', 1)[0]

def compatible_filename(compatible):
    # Name of the per-compatible disambiguation page within the
    # out_dir / compatibles directory.

    return f'{compatible}.rst'

def zref(target, text=None):
    # Make an appropriate RST :ref:`text <target>` or :ref:`target`
    # string to a zephyr documentation ref target 'target', and return
    # it.
    #
    # By default, the bindings docs are in the main Zephyr
    # documentation, but this script supports putting them in a
    # separate Sphinx doc set. Since we also link to Zephyr
    # documentation from the generated content, we have an environment
    # variable based escape hatch for putting the target in the zephyr
    # doc set.
    #
    # This relies on intersphinx:
    # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html
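    #
    # Illustrative example only: with GEN_DEVICETREE_REST_ZEPHYR_DOCSET set
    # to 'zephyr', zref('dt-bindings') returns ':ref:`zephyr:dt-bindings`';
    # with it unset or empty, it returns ':ref:`dt-bindings`'.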

    docset = os.environ.get('GEN_DEVICETREE_REST_ZEPHYR_DOCSET', '')

    if docset.strip():
        target = f'{docset}:{target}'

    if text:
        return f':ref:`{text} <{target}>`'

    return f':ref:`{target}`'

def binding_filename(binding):
    # Returns the output file name for a binding relative to the
    # directory containing documentation for all bindings. It does
    # this by stripping off the '.../dts/bindings/' prefix common to
    # all bindings files in a DTS_ROOT directory.
    #
    # For example, for .../zephyr/dts/bindings/base/base.yaml, this
    # would return 'base/base.rst'.
    #
    # Hopefully that's unique across roots. If not, we'll need to
    # update this function.

    as_posix = Path(binding.path).as_posix()
    dts_bindings = 'dts/bindings/'
    idx = as_posix.rfind(dts_bindings)

    if idx == -1:
        raise ValueError(f'binding path has no {dts_bindings}: {binding.path}')

    # Cut past dts/bindings, strip off the extension (.yaml or .yml), and
    # replace with .rst.
    return os.path.splitext(as_posix[idx + len(dts_bindings):])[0] + '.rst'

def binding_ref_target(binding):
    # Return the sphinx ':ref:' target name for a binding.
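    #
    # For example (illustrative file name): a binding defined in
    # 'vnd,uart.yaml' would get the ref target 'dtbinding_vnd_uart'.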

    stem = Path(binding.path).stem
    return 'dtbinding_' + re.sub('[/,-]', '_', stem)

def write_if_updated(path, s):
    # gen_helpers.write_if_updated() wrapper that handles logging and
    # creating missing parents, as needed.

    if not path.parent.is_dir():
        path.parent.mkdir(parents=True)
    written = gen_helpers.write_if_updated(path, s)
    logger.debug('%s %s', 'wrote' if written else 'did NOT write', path)
    return written


if __name__ == '__main__':
    main()
    sys.exit(0)