# Copyright (c) 2020 Nordic Semiconductor ASA
# SPDX-License-Identifier: Apache-2.0

"""
Like gen_kconfig_rest.py, but for generating an index of existing
devicetree bindings.
"""

import argparse
import glob
import io
import logging
import os
import pprint
import re
import sys
import textwrap
from collections import defaultdict
from pathlib import Path

import gen_helpers
from devicetree import edtlib

ZEPHYR_BASE = Path(__file__).parents[2]

GENERIC_OR_VENDOR_INDEPENDENT = 'Generic or vendor-independent'
UNKNOWN_VENDOR = 'Unknown vendor'

# Base properties that have documentation in 'dt-important-props'.
DETAILS_IN_IMPORTANT_PROPS = set('compatible label reg status interrupts'.split())

logger = logging.getLogger('gen_devicetree_rest')

class VndLookup:
    """
    A convenience class for looking up information based on a
    devicetree compatible's vendor prefix 'vnd'.
    """

    def __init__(self, vendor_prefixes, bindings):
        self.vnd2vendor = self.load_vnd2vendor(vendor_prefixes)
        self.vnd2bindings = self.init_vnd2bindings(bindings)
        self.vnd2ref_target = self.init_vnd2ref_target()

    def vendor(self, vnd):
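        # Return the full vendor name for the vendor prefix 'vnd', or
        # UNKNOWN_VENDOR if the prefix is not recognized.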
        return self.vnd2vendor.get(vnd, UNKNOWN_VENDOR)

    def bindings(self, vnd, default=None):
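        # Return the list of bindings for the vendor prefix 'vnd', or
        # 'default' if there is no such key.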
        return self.vnd2bindings.get(vnd, default)

    def target(self, vnd):
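        # Return the ref target for the vendor prefix 'vnd', falling
        # back to the unknown-vendor target for unrecognized prefixes.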
        return self.vnd2ref_target.get(
            vnd, self.vnd2ref_target[(UNKNOWN_VENDOR,)])

    @staticmethod
    def load_vnd2vendor(vendor_prefixes):
        # Load the vendor-prefixes.txt file. Return a dict mapping 'vnd'
        # vendor prefixes as they are found in compatible properties to
        # each vendor's full name.
        #
        # For example, this line:
        #
        #    vnd	A stand-in for a real vendor
        #
        # Gets split into a key 'vnd' and a value 'A stand-in for a real
        # vendor' in the return value.
        #
        # The 'None' key maps to GENERIC_OR_VENDOR_INDEPENDENT.

        vnd2vendor = {
            None: GENERIC_OR_VENDOR_INDEPENDENT,
        }
        vnd2vendor.update(edtlib.load_vendor_prefixes_txt(vendor_prefixes))

        logger.info('found %d vendor prefixes in %s', len(vnd2vendor) - 1,
                    vendor_prefixes)
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2vendor=%s', pprint.pformat(vnd2vendor))

        return vnd2vendor

    def init_vnd2bindings(self, bindings):
        # Take a 'vnd2vendor' map and a list of bindings and return a dict
        # mapping 'vnd' vendor prefixes to lists of bindings. The
        # bindings in each list are sorted by compatible. The keys in the
        # return value are sorted by vendor name.
        #
        # Special cases:
        #
        # - The 'None' key maps to bindings with no vendor prefix
        #   in their compatibles, like 'gpio-keys'. This is the first key.
        # - The (UNKNOWN_VENDOR,) key maps to bindings whose compatible
        #   has a vendor prefix that exists, but is not known,
        #   like 'somethingrandom,device'. This is the last key.

        # Get an unsorted dict mapping vendor prefixes to lists of bindings.
        unsorted = defaultdict(list)
        generic_bindings = []
        unknown_vendor_bindings = []
        for binding in bindings:
            vnd = compatible_vnd(binding.compatible)
            if vnd is None:
                generic_bindings.append(binding)
            elif vnd in self.vnd2vendor:
                unsorted[vnd].append(binding)
            else:
                unknown_vendor_bindings.append(binding)

        # Key functions for sorting.
        def vnd_key(vnd):
            return self.vnd2vendor[vnd].casefold()

        def binding_key(binding):
            return binding.compatible

        # Sort the bindings for each vendor by compatible.
        # Plain dicts preserve insertion order in CPython 3.6+, which
        # is what we support, so the return dict's keys stay in the
        # order they are inserted below: generic bindings first, then
        # vendors sorted by name, then unknown vendors last.
        #
        # The unknown-vendor bindings being inserted as a 1-tuple key is a
        # hack for convenience that ensures they won't collide with a
        # known vendor. The code that consumes the dict below handles
        # this.
        vnd2bindings = {
            None: sorted(generic_bindings, key=binding_key)
        }
        for vnd in sorted(unsorted, key=vnd_key):
            vnd2bindings[vnd] = sorted(unsorted[vnd], key=binding_key)
        vnd2bindings[(UNKNOWN_VENDOR,)] = sorted(unknown_vendor_bindings,
                                                 key=binding_key)

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2bindings: %s', pprint.pformat(vnd2bindings))

        return vnd2bindings

    def init_vnd2ref_target(self):
        # The return value, vnd2ref_target, is a dict mapping vendor
        # prefixes to ref targets for their relevant sections in this
        # file, with these special cases:
        #
        # - The None key maps to the ref target for bindings with no
        #   vendor prefix in their compatibles, like 'gpio-keys'
        # - The (UNKNOWN_VENDOR,) key maps to the ref target for bindings
        #   whose compatible has a vendor prefix that is not recognized.
        vnd2ref_target = {}

        for vnd in self.vnd2bindings:
            if vnd is None:
                vnd2ref_target[vnd] = 'dt_no_vendor'
            elif isinstance(vnd, str):
                vnd2ref_target[vnd] = f'dt_vendor_{vnd}'
            else:
                assert vnd == (UNKNOWN_VENDOR,), vnd
                vnd2ref_target[vnd] = 'dt_unknown_vendor'

        return vnd2ref_target

def main():
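    # Parse the command line, load bindings and related metadata, then
    # generate the output files under args.out_dir.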
    args = parse_args()
    setup_logging(args.verbose)
    bindings = load_bindings(args.dts_roots, args.dts_folders, args.dts_files)
    base_binding = load_base_binding()
    driver_sources = load_driver_sources()
    vnd_lookup = VndLookup(args.vendor_prefixes, bindings)
    dump_content(bindings, base_binding, vnd_lookup, driver_sources, args.out_dir,
                 args.turbo_mode)

def parse_args():
    # Parse command line arguments from sys.argv.

    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument('-v', '--verbose', default=0, action='count',
                        help='increase verbosity; may be given multiple times')
    parser.add_argument('--vendor-prefixes', required=True,
                        help='vendor-prefixes.txt file path')
    parser.add_argument('--dts-root', dest='dts_roots', action='append',
                        help='''additional DTS root directory as it would
                        be set in DTS_ROOTS''')
    parser.add_argument('--dts-folder', dest='dts_folders', action='append', default=[],
                        help='additional DTS folders containing binding files')
    parser.add_argument('--dts-file', dest='dts_files', action='append', default=[],
                        help='additional individual DTS binding files')
    parser.add_argument('--turbo-mode', action='store_true',
                        help='Enable turbo mode (dummy references)')
    parser.add_argument('out_dir', help='output files are generated here')

    return parser.parse_args()

def setup_logging(verbose):
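    # Configure logging: -v selects INFO, -vv (or more) selects DEBUG,
    # and the default level is ERROR.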
    if verbose >= 2:
        log_level = logging.DEBUG
    elif verbose == 1:
        log_level = logging.INFO
    else:
        log_level = logging.ERROR
    logging.basicConfig(format='%(filename)s:%(levelname)s: %(message)s',
                        level=log_level)

def load_bindings(dts_roots, dts_folders, dts_files):
    # Get a list of edtlib.Binding objects from searching 'dts_roots',
    # 'dts_folders', and 'dts_files'.

    if not dts_roots:
        sys.exit('no DTS roots; use --dts-root to specify at least one')

    binding_files = []
    for dts_root in dts_roots:
        binding_files.extend(glob.glob(f'{dts_root}/dts/bindings/**/*.yml',
                                       recursive=True))
        binding_files.extend(glob.glob(f'{dts_root}/dts/bindings/**/*.yaml',
                                       recursive=True))
    for folders in dts_folders:
        binding_files.extend(glob.glob(f'{folders}/*.yml', recursive=False))
        binding_files.extend(glob.glob(f'{folders}/*.yaml', recursive=False))
    binding_files.extend(dts_files)

    bindings = edtlib.bindings_from_paths(binding_files, ignore_errors=True)

    num_total = len(bindings)

    # Remove bindings from the 'vnd' vendor, which is not a real vendor,
    # but rather a stand-in we use for examples and tests when a real
    # vendor would be inappropriate.
    bindings = [binding for binding in bindings if
                compatible_vnd(binding.compatible) != 'vnd']

    logger.info('found %d bindings (ignored %d) in this dts_roots list: %s',
                len(bindings), num_total - len(bindings), dts_roots)

    return bindings

def load_base_binding():
    # Make a Binding object for base.yaml.
    #
    # This helps separate presentation for properties common to all
    # nodes from node-specific properties.

    base_yaml = ZEPHYR_BASE / 'dts' / 'bindings' / 'base' / 'base.yaml'
    base_includes = {"pm.yaml": os.fspath(ZEPHYR_BASE / 'dts' / 'bindings' / 'base' / 'pm.yaml')}

    if not base_yaml.is_file():
        sys.exit(f'Expected to find base.yaml at {base_yaml}')
    return edtlib.Binding(os.fspath(base_yaml), base_includes, require_compatible=False,
                          require_description=False)

def load_driver_sources():
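    # Scan the source tree for driver implementations. Return a dict
    # mapping DT_DRV_COMPAT compatibles (as they appear after the
    # #define, i.e. lowercased with special characters replaced by
    # underscores) to the Zephyr-relative path of the source file, or
    # enclosing folder, that most likely implements the driver.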
    driver_sources = {}
    dt_drv_compat_occurrences = defaultdict(list)

    dt_drv_compat_pattern = re.compile(r"#define DT_DRV_COMPAT\s+(.*)")
    device_dt_inst_define_pattern = re.compile(r"DEVICE_DT_INST_DEFINE")

    folders_to_scan = ["boards", "drivers", "modules", "soc", "subsys"]

    # When looking at folders_to_scan, a file is considered a likely driver source if:
    # - There is one and only one file with a "#define DT_DRV_COMPAT <compatible>" for a given
    #   compatible.
    # - or, a file contains both a "#define DT_DRV_COMPAT <compatible>" and a
    #   DEVICE_DT_INST_DEFINE(...) call.

    for folder in folders_to_scan:
        for dirpath, _, filenames in os.walk(ZEPHYR_BASE / folder):
            for filename in filenames:
                if not filename.endswith(('.c', '.h')):
                    continue
                filepath = Path(dirpath) / filename
                with open(filepath, encoding="utf-8") as f:
                    content = f.read()

                relative_path = filepath.relative_to(ZEPHYR_BASE)

                # Find all DT_DRV_COMPAT occurrences in the file
                dt_drv_compat_matches = dt_drv_compat_pattern.findall(content)
                for compatible in dt_drv_compat_matches:
                    dt_drv_compat_occurrences[compatible].append(relative_path)

                if dt_drv_compat_matches and device_dt_inst_define_pattern.search(content):
                    for compatible in dt_drv_compat_matches:
                        if compatible in driver_sources:
                            # Mark as ambiguous if multiple files define the same compatible
                            driver_sources[compatible] = None
                        else:
                            driver_sources[compatible] = relative_path

    # Remove ambiguous driver sources
    driver_sources = {k: v for k, v in driver_sources.items() if v is not None}

    # Consider DT_DRV_COMPATs with only one occurrence as driver sources
    for compatible, occurrences in dt_drv_compat_occurrences.items():
        if compatible not in driver_sources and len(occurrences) == 1:
            path = occurrences[0]
            # Assume the driver is defined in the enclosing folder if it's a header file
            if path.suffix == ".h":
                path = path.parent
            driver_sources[compatible] = path

    return driver_sources

def dump_content(bindings, base_binding, vnd_lookup, driver_sources, out_dir, turbo_mode):
    # Dump the generated .rst files for a vnd2bindings dict.
    # Files are only written if they are changed. Existing .rst
    # files which would not be written by the 'vnd2bindings'
    # dict are removed.

    out_dir = Path(out_dir)

    setup_bindings_dir(bindings, out_dir)
    if turbo_mode:
        write_dummy_index(bindings, out_dir)
    else:
        write_bindings_rst(vnd_lookup, out_dir)
        write_orphans(bindings, base_binding, vnd_lookup, driver_sources, out_dir)

def setup_bindings_dir(bindings, out_dir):
    # Make a set of all the Path objects we will be creating for
    # out_dir / bindings / {binding_path}.rst. Delete all the ones that
    # shouldn't be there. Make sure the bindings output directory
    # exists.

    paths = set()
    bindings_dir = out_dir / 'bindings'
    logger.info('making output subdirectory %s', bindings_dir)
    bindings_dir.mkdir(parents=True, exist_ok=True)

    for binding in bindings:
        paths.add(bindings_dir / binding_filename(binding))

    for dirpath, _, filenames in os.walk(bindings_dir):
        for filename in filenames:
            path = Path(dirpath) / filename
            if path not in paths:
                logger.info('removing unexpected file %s', path)
                path.unlink()


def write_dummy_index(bindings, out_dir):
    # Write out_dir / bindings.rst, with dummy anchors

    # header
    content = '\n'.join((
        '.. _devicetree_binding_index:',
        '.. _dt_vendor_zephyr:',
        '',
        'Dummy bindings index',
        '####################',
        '',
    ))

    # build compatibles set and dump it
    compatibles = {binding.compatible for binding in bindings}
    content += '\n'.join(
        f'.. dtcompatible:: {compatible}' for compatible in sorted(compatibles)
    )

    write_if_updated(out_dir / 'bindings.rst', content)


def write_bindings_rst(vnd_lookup, out_dir):
    # Write out_dir / bindings.rst, the top level index of bindings.

    string_io = io.StringIO()

    print_block(f'''\
    .. _devicetree_binding_index:

    Bindings index
    ##############

    This page documents the available devicetree bindings.
    See {zref('dt-bindings')} for an introduction to the Zephyr bindings
    file format.

    Vendor index
    ************

    This section contains an index of hardware vendors.
    Click on a vendor's name to go to the list of bindings for
    that vendor.

    .. rst-class:: rst-columns
    ''', string_io)

    for vnd, bindings in vnd_lookup.vnd2bindings.items():
        if len(bindings) == 0:
            continue
        print(f'- :ref:`{vnd_lookup.target(vnd)}`', file=string_io)

    print_block('''\

    Bindings by vendor
    ******************

    This section contains available bindings, grouped by vendor.
    Within each group, bindings are listed by the "compatible" property
    they apply to, like this:

    **Vendor name (vendor prefix)**

    .. rst-class:: rst-columns

    - <compatible-A>
    - <compatible-B> (on <bus-name> bus)
    - <compatible-C>
    - ...

    The text "(on <bus-name> bus)" appears when bindings may behave
    differently depending on the bus the node appears on.
    For example, this applies to some sensor device nodes, which may
    appear as children of either I2C or SPI bus nodes.
    ''', string_io)

    for vnd, bindings in vnd_lookup.vnd2bindings.items():
        if isinstance(vnd, tuple):
            title = vnd[0]
        else:
            title = vnd_lookup.vendor(vnd).strip()
            if isinstance(vnd, str):
                title += f' ({vnd})'
        underline = '=' * len(title)

        if len(bindings) == 0:
            continue

        print_block(f'''\
        .. _{vnd_lookup.target(vnd)}:

        {title}
        {underline}

        .. rst-class:: rst-columns
        ''', string_io)
        for binding in bindings:
            print(f'- :ref:`{binding_ref_target(binding)}`', file=string_io)
        print(file=string_io)

    write_if_updated(out_dir / 'bindings.rst', string_io.getvalue())

def write_orphans(bindings, base_binding, vnd_lookup, driver_sources, out_dir):
    # Write out_dir / bindings / foo / binding_page.rst for each binding
    # in 'bindings', along with any "disambiguation" pages needed when a
    # single compatible string can be handled by multiple bindings.
    #
    # These files are 'orphans' in the Sphinx sense: they are not in
    # any toctree.

    logger.info('updating :orphan: files for %d bindings', len(bindings))
    num_written = 0

    # First, figure out which compatibles map to multiple bindings. We
    # need this information to decide which of the generated files for
    # a compatible are "disambiguation" pages that point to per-bus
    # binding pages, and which ones aren't.

    compat2bindings = defaultdict(list)
    for binding in bindings:
        compat2bindings[binding.compatible].append(binding)
    dup_compat2bindings = {compatible: bindings for compatible, bindings
                           in compat2bindings.items() if len(bindings) > 1}

    # Next, write the per-binding pages. These contain the
    # per-compatible targets for compatibles not in 'dup_compats'.
    # We'll finish up by writing per-compatible "disambiguation" pages
    # for compatibles in 'dup_compats'.

    # Names of properties in base.yaml.
    base_names = set(base_binding.prop2specs.keys())
    for binding in bindings:
        string_io = io.StringIO()

        print_binding_page(binding, base_names, vnd_lookup,
                           driver_sources, dup_compat2bindings, string_io)

        written = write_if_updated(out_dir / 'bindings' /
                                   binding_filename(binding),
                                   string_io.getvalue())

        if written:
            num_written += 1

    # Generate disambiguation pages for dup_compats.
    compatibles_dir = out_dir / 'compatibles'
    setup_compatibles_dir(dup_compat2bindings.keys(), compatibles_dir)
    for compatible in dup_compat2bindings:
        string_io = io.StringIO()

        print_compatible_disambiguation_page(
            compatible, dup_compat2bindings[compatible], string_io)

        written = write_if_updated(compatibles_dir /
                                   compatible_filename(compatible),
                                   string_io.getvalue())

        if written:
            num_written += 1

    logger.info('done writing :orphan: files; %d files needed updates',
                num_written)

def print_binding_page(binding, base_names, vnd_lookup, driver_sources, dup_compats,
                       string_io):
    # Print the rst content for 'binding' to 'string_io'. The
    # 'dup_compats' argument should support membership testing for
    # compatibles which have multiple associated bindings; if
    # 'binding.compatible' is not in it, then the ref target for the
    # entire compatible is generated in this page as well.

    # :orphan:
    #
    # .. ref_target:
    #
    # Title [(on <bus> bus)]
    # ######################
    if binding.on_bus:
        on_bus_title = f' (on {binding.on_bus} bus)'
    else:
        on_bus_title = ''
    compatible = binding.compatible

    title = f'{compatible}{on_bus_title}'
    underline = '#' * len(title)
    if compatible not in dup_compats:
        # If this binding is the only one that handles this
        # compatible, point the ".. dtcompatible:" directive straight
        # to this page. There's no need for disambiguation.
        dtcompatible = f'.. dtcompatible:: {binding.compatible}'
    else:
        # This compatible is handled by multiple bindings;
        # its ".. dtcompatible::" should be in a disambiguation page
        # instead.
        dtcompatible = ''

    print_block(f'''\
    :orphan:

    .. raw:: html

        <!--
        FIXME: do not limit page width until content uses another representation
        format other than tables
        -->
        <style>.wy-nav-content {{ max-width: none !important; }}</style>

    {dtcompatible}
    .. _{binding_ref_target(binding)}:

    {title}
    {underline}
    ''', string_io)

    # Vendor: <link-to-vendor-section>
    vnd = compatible_vnd(compatible)
    print('Vendor: '
          f':ref:`{vnd_lookup.vendor(vnd)} <{vnd_lookup.target(vnd)}>`\n',
          file=string_io)

    # Link to driver implementation (if it exists).
    compatible = re.sub("[-,.@/+]", "_", compatible.lower())
    if compatible in driver_sources:
        print_block(
            f"""\
            .. note::

               An implementation of a driver matching this compatible is available in
               :zephyr_file:`{driver_sources[compatible]}`.
        """,
            string_io,
        )

    # Binding description.
    if binding.bus:
        bus_help = f'These nodes are "{binding.bus}" bus nodes.'
    else:
        bus_help = ''
    print_block(f'''\
    Description
    ***********

    {bus_help}
    ''', string_io)
    print(to_code_block(binding.description.strip()), file=string_io)

    # Properties.
    print_block('''\
    Properties
    **********
    ''', string_io)
    print_top_level_properties(binding, base_names, string_io)
    print_child_binding_properties(binding, string_io)

    # Specifier cells.
    #
    # This presentation isn't particularly nice. Perhaps something
    # better can be done for future work.
    if binding.specifier2cells:
        print_block('''\
        Specifier cell names
        ********************
        ''', string_io)
        for specifier, cells in binding.specifier2cells.items():
            print(f'- {specifier} cells: {", ".join(cells)}',
                  file=string_io)

def print_top_level_properties(binding, base_names, string_io):
    # Print the RST for top level properties for 'binding' to 'string_io'.
    #
    # The 'base_names' set contains all the base.yaml properties.

    def prop_table(filter_fn, deprecated):
        # Get a properly formatted and indented table of properties.
        specs = [prop_spec for prop_spec in binding.prop2specs.values()
                 if filter_fn(prop_spec)]
        indent = ' ' * 14
        if specs:
            temp_io = io.StringIO()
            print_property_table(specs, temp_io, deprecated=deprecated)
            return textwrap.indent(temp_io.getvalue(), indent)

        return indent + '(None)'

    def node_props_filter(prop_spec):
        return prop_spec.name not in base_names and not prop_spec.deprecated

    def deprecated_node_props_filter(prop_spec):
        return prop_spec.name not in base_names and prop_spec.deprecated

    def base_props_filter(prop_spec):
        return prop_spec.name in base_names

    if binding.child_binding:
        print_block('''\
        Top level properties
        ====================
        ''', string_io)
    if binding.prop2specs:
        if binding.child_binding:
            print_block(f'''
            These property descriptions apply to "{binding.compatible}"
            nodes themselves. This page also describes child node
            properties in the following sections.
            ''', string_io)

        print_block(f'''\
        .. tabs::

           .. group-tab:: Node specific properties

              Properties not inherited from the base binding file.

{prop_table(node_props_filter, False)}

           .. group-tab:: Deprecated node specific properties

              Deprecated properties not inherited from the base binding file.

{prop_table(deprecated_node_props_filter, False)}

           .. group-tab:: Base properties

              Properties inherited from the base binding file, which defines
              common properties that may be set on many nodes. Not all of these
              may apply to the "{binding.compatible}" compatible.

{prop_table(base_props_filter, True)}

        ''', string_io)
    else:
        print('No top-level properties.\n', file=string_io)

def print_child_binding_properties(binding, string_io):
    # Prints property tables for all levels of nesting of child
    # bindings.

    level = 1
    child = binding.child_binding
    while child is not None:
        if level == 1:
            level_string = 'Child'
        elif level == 2:
            level_string = 'Grandchild'
        else:
            level_string = f'Level {level} child'
        if child.prop2specs:
            title = f'{level_string} node properties'
            underline = '=' * len(title)
            print(f'{title}\n{underline}\n', file=string_io)
            print_property_table(child.prop2specs.values(), string_io,
                                 deprecated=True)
        child = child.child_binding
        level += 1

def print_property_table(prop_specs, string_io, deprecated=False):
    # Writes a table of properties based on 'prop_specs', an iterable
    # of edtlib.PropertySpec objects, to 'string_io'.
    #
    # If 'deprecated' is true and the property is deprecated, an extra
    # line is printed mentioning that fact. We allow this to be turned
    # off for tables where all properties are deprecated, so it's
    # clear from context.

    # Table header.
    print_block('''\
    .. list-table::
       :widths: 1 1 4
       :header-rows: 1

       * - Name
         - Type
         - Details
    ''', string_io)

    def to_prop_table_row(prop_spec):
        # Get a multiline string for a PropertySpec table row.

        # The description column combines the description field,
        # along with things like the default value or enum values.
        #
        # The property 'description' field from the binding may span
        # one or multiple lines. We try to come up with a nice
        # presentation for each.
        details = ''
        raw_prop_descr = prop_spec.description
        if raw_prop_descr:
            details += to_code_block(raw_prop_descr)

        if prop_spec.required:
            details += '\n\nThis property is **required**.'

        if prop_spec.default:
            details += f'\n\nDefault value: ``{prop_spec.default}``'

        if prop_spec.const:
            details += f'\n\nConstant value: ``{prop_spec.const}``'
        elif prop_spec.enum:
            details += ('\n\nLegal values: ' +
                        ', '.join(f'``{repr(val)}``' for val in
                                  prop_spec.enum))

        if prop_spec.name in DETAILS_IN_IMPORTANT_PROPS:
            details += (f'\n\nSee {zref("dt-important-props")} for more '
                        'information.')

        if deprecated and prop_spec.deprecated:
            details += '\n\nThis property is **deprecated**.'

        return f"""\
   * - ``{prop_spec.name}``
     - ``{prop_spec.type}``
     - {textwrap.indent(details, ' ' * 7).lstrip()}
"""

    # Print each row.
    for prop_spec in prop_specs:
        print(to_prop_table_row(prop_spec), file=string_io)

def setup_compatibles_dir(compatibles, compatibles_dir):
    # Make a set of all the Path objects we will be creating for
    # out_dir / compatibles / {compatible_path}.rst. Delete all the ones that
    # shouldn't be there. Make sure the compatibles output directory
    # exists.

    logger.info('making output subdirectory %s', compatibles_dir)
    compatibles_dir.mkdir(parents=True, exist_ok=True)

    paths = set(compatibles_dir / compatible_filename(compatible)
                for compatible in compatibles)

    for path in compatibles_dir.iterdir():
        if path not in paths:
            logger.info('removing unexpected file %s', path)
            path.unlink()


def print_compatible_disambiguation_page(compatible, bindings, string_io):
    # Print the disambiguation page for 'compatible', which can be
    # handled by any of the bindings in 'bindings', to 'string_io'.

    assert len(bindings) > 1, (compatible, bindings)

    underline = '#' * len(compatible)
    output_list = '\n    '.join(f'- :ref:`{binding_ref_target(binding)}`'
                                for binding in bindings)

    print_block(f'''\
    :orphan:

    .. dtcompatible:: {compatible}

    {compatible}
    {underline}

    The devicetree compatible ``{compatible}`` may be handled by any
    of the following bindings:

    {output_list}
    ''', string_io)

def print_block(block, string_io):
    # Helper for dedenting and printing a triple-quoted RST block.
    # (Just a block of text, not necessarily just a 'code-block'
    # directive.)

    print(textwrap.dedent(block), file=string_io)

def to_code_block(s, indent=0):
    # Converts 's', a string, to an indented rst .. code-block::. The
    # 'indent' argument is a leading indent for each line in the code
    # block, in spaces.
    indent = indent * ' '
    return ('.. code-block:: none\n\n' +
            textwrap.indent(s, indent + '   ') + '\n')

def compatible_vnd(compatible):
    # Get the vendor prefix for a compatible string 'compatible'.
    #
    # For example, compatible_vnd('foo,device') is 'foo'.
    #
    # If 'compatible' has no comma (','), None is returned.

    if ',' not in compatible:
        return None

    return compatible.split(',', 1)[0]

def compatible_filename(compatible):
    # Name of the per-compatible disambiguation page within the
    # out_dir / compatibles directory.
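    #
    # For example, compatible_filename('foo,device') is
    # 'foo,device.rst'.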

    return f'{compatible}.rst'

def zref(target, text=None):
    # Make an appropriate RST :ref:`text <target>` or :ref:`target`
    # string to a zephyr documentation ref target 'target', and return
    # it.
    #
    # By default, the bindings docs are in the main Zephyr
    # documentation, but this script supports putting them in a
    # separate Sphinx doc set. Since we also link to Zephyr
    # documentation from the generated content, we have an environment
    # variable based escape hatch for putting the target in the zephyr
    # doc set.
    #
    # This relies on intersphinx:
    # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html

    docset = os.environ.get('GEN_DEVICETREE_REST_ZEPHYR_DOCSET', '')

    if docset.strip():
        target = f'{docset}:{target}'

    if text:
        return f':ref:`{text} <{target}>`'

    return f':ref:`{target}`'

def binding_filename(binding):
    # Returns the output file name for a binding relative to the
    # directory containing documentation for all bindings. It does
    # this by stripping off the '.../dts/bindings/' prefix common to
    # all bindings files in a DTS_ROOT directory.
    #
    # For example, for .../zephyr/dts/bindings/base/base.yaml, this
    # would return 'base/base.rst'.
    #
    # Hopefully that's unique across roots. If not, we'll need to
    # update this function.

    as_posix = Path(binding.path).as_posix()
    dts_bindings = 'dts/bindings/'
    idx = as_posix.rfind(dts_bindings)

    if idx == -1:
        raise ValueError(f'binding path has no {dts_bindings}: {binding.path}')

    # Cut past dts/bindings, strip off the extension (.yaml or .yml), and
    # replace with .rst.
    return os.path.splitext(as_posix[idx + len(dts_bindings):])[0] + '.rst'

def binding_ref_target(binding):
    # Return the sphinx ':ref:' target name for a binding.
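    #
    # For example, the binding for base/base.yaml gets the ref target
    # 'dtbinding_base'.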

    stem = Path(binding.path).stem
    return 'dtbinding_' + re.sub('[/,-]', '_', stem)

def write_if_updated(path, s):
    # gen_helpers.write_if_updated() wrapper that handles logging and
    # creating missing parents, as needed.

    if not path.parent.is_dir():
        path.parent.mkdir(parents=True)
    written = gen_helpers.write_if_updated(path, s)
    logger.debug('%s %s', 'wrote' if written else 'did NOT write', path)
    return written


if __name__ == '__main__':
    main()
    sys.exit(0)