# Copyright (c) 2020 Nordic Semiconductor ASA
# SPDX-License-Identifier: Apache-2.0

"""
Like gen_kconfig_rest.py, but for generating an index of existing
devicetree bindings.
"""

import argparse
from collections import defaultdict
import glob
import io
import logging
import os
from pathlib import Path
import pprint
import re
import sys
import textwrap

from devicetree import edtlib

import gen_helpers

ZEPHYR_BASE = Path(__file__).parents[2]

GENERIC_OR_VENDOR_INDEPENDENT = 'Generic or vendor-independent'
UNKNOWN_VENDOR = 'Unknown vendor'

# Base properties that have documentation in 'dt-important-props'.
DETAILS_IN_IMPORTANT_PROPS = set('compatible label reg status interrupts'.split())

logger = logging.getLogger('gen_devicetree_rest')

class VndLookup:
    """
    A convenience class for looking up information based on a
    devicetree compatible's vendor prefix 'vnd'.
    """

    def __init__(self, vendor_prefixes, bindings):
        self.vnd2vendor = self.load_vnd2vendor(vendor_prefixes)
        self.vnd2bindings = self.init_vnd2bindings(bindings)
        self.vnd2ref_target = self.init_vnd2ref_target()

    def vendor(self, vnd):
        return self.vnd2vendor.get(vnd, UNKNOWN_VENDOR)

    def bindings(self, vnd, default=None):
        return self.vnd2bindings.get(vnd, default)

    def target(self, vnd):
        return self.vnd2ref_target.get(
            vnd, self.vnd2ref_target[(UNKNOWN_VENDOR,)])

    @staticmethod
    def load_vnd2vendor(vendor_prefixes):
        # Load the vendor-prefixes.txt file. Return a dict mapping 'vnd'
        # vendor prefixes as they are found in compatible properties to
        # each vendor's full name.
        #
        # For example, this line:
        #
        #    vnd    A stand-in for a real vendor
        #
        # gets split into a key 'vnd' and a value 'A stand-in for a real
        # vendor' in the return value.
        #
        # The 'None' key maps to GENERIC_OR_VENDOR_INDEPENDENT.

        vnd2vendor = {
            None: GENERIC_OR_VENDOR_INDEPENDENT,
        }
        vnd2vendor.update(edtlib.load_vendor_prefixes_txt(vendor_prefixes))

        logger.info('found %d vendor prefixes in %s', len(vnd2vendor) - 1,
                    vendor_prefixes)
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2vendor=%s', pprint.pformat(vnd2vendor))

        return vnd2vendor

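    # Illustration with hypothetical data (not from any real
    # vendor-prefixes.txt): if the file contained the single line
    #
    #    acme    Acme Corporation
    #
    # then load_vnd2vendor() would return
    # {None: GENERIC_OR_VENDOR_INDEPENDENT, 'acme': 'Acme Corporation'},
    # so vendor('acme') == 'Acme Corporation', while vendor('bogus')
    # falls back to UNKNOWN_VENDOR and target('bogus') falls back to
    # the 'dt_unknown_vendor' ref target.
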
    def init_vnd2bindings(self, bindings):
        # Take a list of bindings and return a dict mapping 'vnd'
        # vendor prefixes to lists of bindings, using the
        # self.vnd2vendor map to recognize known prefixes. The
        # bindings in each list are sorted by compatible. The keys in
        # the return value are sorted by vendor name.
        #
        # Special cases:
        #
        # - The 'None' key maps to bindings with no vendor prefix
        #   in their compatibles, like 'gpio-keys'. This is the first key.
        # - The (UNKNOWN_VENDOR,) key maps to bindings whose compatible
        #   has a vendor prefix that exists, but is not known,
        #   like 'somethingrandom,device'. This is the last key.

        # Get an unsorted dict mapping vendor prefixes to lists of bindings.
        unsorted = defaultdict(list)
        generic_bindings = []
        unknown_vendor_bindings = []
        for binding in bindings:
            vnd = compatible_vnd(binding.compatible)
            if vnd is None:
                generic_bindings.append(binding)
            elif vnd in self.vnd2vendor:
                unsorted[vnd].append(binding)
            else:
                unknown_vendor_bindings.append(binding)

        # Key functions for sorting.
        def vnd_key(vnd):
            return self.vnd2vendor[vnd].casefold()

        def binding_key(binding):
            return binding.compatible

        # Sort the bindings for each vendor by compatible. Plain dicts
        # preserve insertion order in CPython 3.6+, which is what we
        # support, so the return value's keys end up in the order we
        # insert them: generic bindings first, then known vendors
        # sorted by vendor name, then unknown-vendor bindings last.
        #
        # The unknown-vendor bindings being inserted as a 1-tuple key is a
        # hack for convenience that ensures they won't collide with a
        # known vendor. The code that consumes the dict below handles
        # this.
        vnd2bindings = {
            None: sorted(generic_bindings, key=binding_key)
        }
        for vnd in sorted(unsorted, key=vnd_key):
            vnd2bindings[vnd] = sorted(unsorted[vnd], key=binding_key)
        vnd2bindings[(UNKNOWN_VENDOR,)] = sorted(unknown_vendor_bindings,
                                                 key=binding_key)

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2bindings: %s', pprint.pformat(vnd2bindings))

        return vnd2bindings

    def init_vnd2ref_target(self):
        # The return value, vnd2ref_target, is a dict mapping vendor
        # prefixes to ref targets for their relevant sections in this
        # file, with these special cases:
        #
        # - The None key maps to the ref target for bindings with no
        #   vendor prefix in their compatibles, like 'gpio-keys'
        # - The (UNKNOWN_VENDOR,) key maps to the ref target for bindings
        #   whose compatible has a vendor prefix that is not recognized.
        vnd2ref_target = {}

        for vnd in self.vnd2bindings:
            if vnd is None:
                vnd2ref_target[vnd] = 'dt_no_vendor'
            elif isinstance(vnd, str):
                vnd2ref_target[vnd] = f'dt_vendor_{vnd}'
            else:
                assert vnd == (UNKNOWN_VENDOR,), vnd
                vnd2ref_target[vnd] = 'dt_unknown_vendor'

        return vnd2ref_target

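# Illustration with hypothetical vendors and bindings: given known
# prefixes 'acme' and 'zyx', a VndLookup instance might end up with
#
#    vnd2bindings = {None: [<binding for 'gpio-keys'>],
#                    'acme': [<binding for 'acme,foo-uart'>],
#                    'zyx': [<binding for 'zyx,bar-spi'>],
#                    (UNKNOWN_VENDOR,): [<binding for 'somethingrandom,device'>]}
#
# and the matching ref targets would be
#
#    vnd2ref_target = {None: 'dt_no_vendor',
#                      'acme': 'dt_vendor_acme',
#                      'zyx': 'dt_vendor_zyx',
#                      (UNKNOWN_VENDOR,): 'dt_unknown_vendor'}
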
def main():
    args = parse_args()
    setup_logging(args.verbose)
    bindings = load_bindings(args.dts_roots)
    base_binding = load_base_binding()
    vnd_lookup = VndLookup(args.vendor_prefixes, bindings)
    dump_content(bindings, base_binding, vnd_lookup, args.out_dir,
                 args.turbo_mode)

def parse_args():
    # Parse command line arguments from sys.argv.

    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument('-v', '--verbose', default=0, action='count',
                        help='increase verbosity; may be given multiple times')
    parser.add_argument('--vendor-prefixes', required=True,
                        help='vendor-prefixes.txt file path')
    parser.add_argument('--dts-root', dest='dts_roots', action='append',
                        help='''additional DTS root directory as it would
                        be set in DTS_ROOT''')
    parser.add_argument('--turbo-mode', action='store_true',
                        help='Enable turbo mode (dummy references)')
    parser.add_argument('out_dir', help='output files are generated here')

    return parser.parse_args()

def setup_logging(verbose):
    if verbose >= 2:
        log_level = logging.DEBUG
    elif verbose == 1:
        log_level = logging.INFO
    else:
        log_level = logging.ERROR
    logging.basicConfig(format='%(filename)s:%(levelname)s: %(message)s',
                        level=log_level)

def load_bindings(dts_roots):
    # Get a list of edtlib.Binding objects from searching 'dts_roots'.

    if not dts_roots:
        sys.exit('no DTS roots; use --dts-root to specify at least one')

    binding_files = []
    for dts_root in dts_roots:
        binding_files.extend(glob.glob(f'{dts_root}/dts/bindings/**/*.yml',
                                       recursive=True))
        binding_files.extend(glob.glob(f'{dts_root}/dts/bindings/**/*.yaml',
                                       recursive=True))

    bindings = edtlib.bindings_from_paths(binding_files, ignore_errors=True)

    num_total = len(bindings)

    # Remove bindings from the 'vnd' vendor, which is not a real vendor,
    # but rather a stand-in we use for examples and tests when a real
    # vendor would be inappropriate.
    bindings = [binding for binding in bindings if
                compatible_vnd(binding.compatible) != 'vnd']

    logger.info('found %d bindings (ignored %d) in this dts_roots list: %s',
                len(bindings), num_total - len(bindings), dts_roots)

    return bindings

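# Example invocation (hypothetical paths; the script name is assumed to
# match this module):
#
#    python3 gen_devicetree_rest.py -v \
#        --vendor-prefixes path/to/zephyr/dts/bindings/vendor-prefixes.txt \
#        --dts-root path/to/zephyr \
#        build/rst-out
#
# Each --dts-root is searched recursively for dts/bindings/**/*.yaml and
# *.yml files, and the generated .rst output lands under build/rst-out.
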
def load_base_binding():
    # Make a Binding object for base.yaml.
    #
    # This helps separate presentation for properties common to all
    # nodes from node-specific properties.

    base_yaml = ZEPHYR_BASE / 'dts' / 'bindings' / 'base' / 'base.yaml'
    base_includes = {"pm.yaml": os.fspath(ZEPHYR_BASE / 'dts' / 'bindings' / 'base' / 'pm.yaml')}

    if not base_yaml.is_file():
        sys.exit(f'Expected to find base.yaml at {base_yaml}')
    return edtlib.Binding(os.fspath(base_yaml), base_includes, require_compatible=False,
                          require_description=False)

def dump_content(bindings, base_binding, vnd_lookup, out_dir, turbo_mode):
    # Dump the generated .rst files for a vnd2bindings dict.
    # Files are only written if they are changed. Existing .rst
    # files which would not be written by the 'vnd2bindings'
    # dict are removed.

    out_dir = Path(out_dir)

    setup_bindings_dir(bindings, out_dir)
    if turbo_mode:
        write_dummy_index(bindings, out_dir)
    else:
        write_bindings_rst(vnd_lookup, out_dir)
        write_orphans(bindings, base_binding, vnd_lookup, out_dir)

def setup_bindings_dir(bindings, out_dir):
    # Make a set of all the Path objects we will be creating for
    # out_dir / bindings / {binding_path}.rst. Delete all the ones that
    # shouldn't be there. Make sure the bindings output directory
    # exists.

    paths = set()
    bindings_dir = out_dir / 'bindings'
    logger.info('making output subdirectory %s', bindings_dir)
    bindings_dir.mkdir(parents=True, exist_ok=True)

    for binding in bindings:
        paths.add(bindings_dir / binding_filename(binding))

    for dirpath, _, filenames in os.walk(bindings_dir):
        for filename in filenames:
            path = Path(dirpath) / filename
            if path not in paths:
                logger.info('removing unexpected file %s', path)
                path.unlink()


def write_dummy_index(bindings, out_dir):
    # Write out_dir / bindings.rst, with dummy anchors

    # header
    content = '\n'.join((
        '.. _devicetree_binding_index:',
        '.. _dt_vendor_zephyr:',
        '',
        'Dummy bindings index',
        '####################',
        '',
    ))

    # build compatibles set and dump it
    compatibles = {binding.compatible for binding in bindings}
    content += '\n'.join((
        f'.. dtcompatible:: {compatible}' for compatible in compatibles
    ))

    write_if_updated(out_dir / 'bindings.rst', content)


def write_bindings_rst(vnd_lookup, out_dir):
    # Write out_dir / bindings.rst, the top level index of bindings.

    string_io = io.StringIO()

    print_block(f'''\
    .. _devicetree_binding_index:

    Bindings index
    ##############

    This page documents the available devicetree bindings.
    See {zref('dt-bindings')} for an introduction to the Zephyr bindings
    file format.

    Vendor index
    ************

    This section contains an index of hardware vendors.
    Click on a vendor's name to go to the list of bindings for
    that vendor.

    .. rst-class:: rst-columns
    ''', string_io)

    for vnd in vnd_lookup.vnd2bindings:
        print(f'- :ref:`{vnd_lookup.target(vnd)}`', file=string_io)

    print_block('''\

    Bindings by vendor
    ******************

    This section contains available bindings, grouped by vendor.
    Within each group, bindings are listed by the "compatible" property
    they apply to, like this:

    **Vendor name (vendor prefix)**

    .. rst-class:: rst-columns

    - <compatible-A>
    - <compatible-B> (on <bus-name> bus)
    - <compatible-C>
    - ...

    The text "(on <bus-name> bus)" appears when bindings may behave
    differently depending on the bus the node appears on.
    For example, this applies to some sensor device nodes, which may
    appear as children of either I2C or SPI bus nodes.
    ''', string_io)

    for vnd, bindings in vnd_lookup.vnd2bindings.items():
        if isinstance(vnd, tuple):
            title = vnd[0]
        else:
            title = vnd_lookup.vendor(vnd).strip()
            if isinstance(vnd, str):
                title += f' ({vnd})'
        underline = '=' * len(title)

        print_block(f'''\
        .. _{vnd_lookup.target(vnd)}:

        {title}
        {underline}

        .. rst-class:: rst-columns
        ''', string_io)
        for binding in bindings:
            print(f'- :ref:`{binding_ref_target(binding)}`', file=string_io)
        print(file=string_io)

    write_if_updated(out_dir / 'bindings.rst', string_io.getvalue())

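# Sketch of the generated bindings.rst (abridged, using a hypothetical
# 'acme' vendor; the real output depends on the bindings that are found):
#
#    .. _devicetree_binding_index:
#
#    Bindings index
#    ##############
#    ...
#    .. _dt_vendor_acme:
#
#    Acme Corporation (acme)
#    =======================
#
#    .. rst-class:: rst-columns
#
#    - :ref:`dtbinding_acme_foo_uart`
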
def write_orphans(bindings, base_binding, vnd_lookup, out_dir):
    # Write out_dir / bindings / foo / binding_page.rst for each binding
    # in 'bindings', along with any "disambiguation" pages needed when a
    # single compatible string can be handled by multiple bindings.
    #
    # These files are 'orphans' in the Sphinx sense: they are not in
    # any toctree.

    logger.info('updating :orphan: files for %d bindings', len(bindings))
    num_written = 0

    # First, figure out which compatibles map to multiple bindings. We
    # need this information to decide which of the generated files for
    # a compatible are "disambiguation" pages that point to per-bus
    # binding pages, and which ones aren't.

    compat2bindings = defaultdict(list)
    for binding in bindings:
        compat2bindings[binding.compatible].append(binding)
    dup_compat2bindings = {compatible: bindings for compatible, bindings
                           in compat2bindings.items() if len(bindings) > 1}

    # Next, write the per-binding pages. These contain the
    # per-compatible targets for compatibles not in 'dup_compats'.
    # We'll finish up by writing per-compatible "disambiguation" pages
    # for compatibles in 'dup_compats'.

    # Names of properties in base.yaml.
    base_names = set(base_binding.prop2specs.keys())
    for binding in bindings:
        string_io = io.StringIO()

        print_binding_page(binding, base_names, vnd_lookup,
                           dup_compat2bindings, string_io)

        written = write_if_updated(out_dir / 'bindings' /
                                   binding_filename(binding),
                                   string_io.getvalue())

        if written:
            num_written += 1

    # Generate disambiguation pages for dup_compats.
    compatibles_dir = out_dir / 'compatibles'
    setup_compatibles_dir(dup_compat2bindings.keys(), compatibles_dir)
    for compatible in dup_compat2bindings:
        string_io = io.StringIO()

        print_compatible_disambiguation_page(
            compatible, dup_compat2bindings[compatible], string_io)

        written = write_if_updated(compatibles_dir /
                                   compatible_filename(compatible),
                                   string_io.getvalue())

        if written:
            num_written += 1

    logger.info('done writing :orphan: files; %d files needed updates',
                num_written)

def print_binding_page(binding, base_names, vnd_lookup, dup_compats,
                       string_io):
    # Print the rst content for 'binding' to 'string_io'. The
    # 'dup_compats' argument should support membership testing for
    # compatibles which have multiple associated bindings; if
    # 'binding.compatible' is not in it, then the ref target for the
    # entire compatible is generated in this page as well.

    # :orphan:
    #
    # .. ref_target:
    #
    # Title [(on <bus> bus)]
    # ######################
    if binding.on_bus:
        on_bus_title = f' (on {binding.on_bus} bus)'
    else:
        on_bus_title = ''
    compatible = binding.compatible

    title = f'{compatible}{on_bus_title}'
    underline = '#' * len(title)
    if compatible not in dup_compats:
        # If this binding is the only one that handles this
        # compatible, point the ".. dtcompatible:" directive straight
        # to this page. There's no need for disambiguation.
        dtcompatible = f'.. dtcompatible:: {binding.compatible}'
    else:
        # This compatible is handled by multiple bindings;
        # its ".. dtcompatible::" should be in a disambiguation page
        # instead.
        dtcompatible = ''

    print_block(f'''\
    :orphan:

    .. raw:: html

        <!--
        FIXME: do not limit page width until content uses a representation
        other than tables
        -->
        <style>.wy-nav-content {{ max-width: none !important; }}</style>

    {dtcompatible}
    .. _{binding_ref_target(binding)}:

    {title}
    {underline}
    ''', string_io)

    # Vendor: <link-to-vendor-section>
    vnd = compatible_vnd(compatible)
    print('Vendor: '
          f':ref:`{vnd_lookup.vendor(vnd)} <{vnd_lookup.target(vnd)}>`\n',
          file=string_io)

    # Binding description.
    if binding.bus:
        bus_help = f'These nodes are "{binding.bus}" bus nodes.'
    else:
        bus_help = ''
    print_block(f'''\
    Description
    ***********

    {bus_help}
    ''', string_io)
    print(to_code_block(binding.description.strip()), file=string_io)

    # Properties.
    print_block('''\
    Properties
    **********
    ''', string_io)
    print_top_level_properties(binding, base_names, string_io)
    print_child_binding_properties(binding, string_io)

    # Specifier cells.
    #
    # This presentation isn't particularly nice. Perhaps something
    # better can be done for future work.
    if binding.specifier2cells:
        print_block('''\
        Specifier cell names
        ********************
        ''', string_io)
        for specifier, cells in binding.specifier2cells.items():
            print(f'- {specifier} cells: {", ".join(cells)}',
                  file=string_io)

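# Sketch of a generated binding page (abridged, for a hypothetical
# binding acme,foo-uart.yaml whose compatible 'acme,foo-uart' is handled
# by only this binding):
#
#    :orphan:
#
#    .. dtcompatible:: acme,foo-uart
#    .. _dtbinding_acme_foo_uart:
#
#    acme,foo-uart
#    #############
#
#    Vendor: :ref:`Acme Corporation <dt_vendor_acme>`
#
#    Description
#    ***********
#    ...
#    Properties
#    **********
#    ...
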
def print_top_level_properties(binding, base_names, string_io):
    # Print the RST for top level properties for 'binding' to 'string_io'.
    #
    # The 'base_names' set contains all the base.yaml properties.

    def prop_table(filter_fn, deprecated):
        # Get a properly formatted and indented table of properties.
        specs = [prop_spec for prop_spec in binding.prop2specs.values()
                 if filter_fn(prop_spec)]
        indent = ' ' * 14
        if specs:
            temp_io = io.StringIO()
            print_property_table(specs, temp_io, deprecated=deprecated)
            return textwrap.indent(temp_io.getvalue(), indent)

        return indent + '(None)'

    def node_props_filter(prop_spec):
        return prop_spec.name not in base_names and not prop_spec.deprecated

    def deprecated_node_props_filter(prop_spec):
        return prop_spec.name not in base_names and prop_spec.deprecated

    def base_props_filter(prop_spec):
        return prop_spec.name in base_names

    if binding.child_binding:
        print_block('''\
        Top level properties
        ====================
        ''', string_io)
    if binding.prop2specs:
        if binding.child_binding:
            print_block(f'''
            These property descriptions apply to "{binding.compatible}"
            nodes themselves. This page also describes child node
            properties in the following sections.
            ''', string_io)

        print_block(f'''\
        .. tabs::

           .. group-tab:: Node specific properties

              Properties not inherited from the base binding file.

{prop_table(node_props_filter, False)}

           .. group-tab:: Deprecated node specific properties

              Deprecated properties not inherited from the base binding file.

{prop_table(deprecated_node_props_filter, False)}

           .. group-tab:: Base properties

              Properties inherited from the base binding file, which defines
              common properties that may be set on many nodes. Not all of these
              may apply to the "{binding.compatible}" compatible.

{prop_table(base_props_filter, True)}

        ''', string_io)
    else:
        print('No top-level properties.\n', file=string_io)

def print_child_binding_properties(binding, string_io):
    # Prints property tables for all levels of nesting of child
    # bindings.

    level = 1
    child = binding.child_binding
    while child is not None:
        if level == 1:
            level_string = 'Child'
        elif level == 2:
            level_string = 'Grandchild'
        else:
            level_string = f'Level {level} child'
        if child.prop2specs:
            title = f'{level_string} node properties'
            underline = '=' * len(title)
            print(f'{title}\n{underline}\n', file=string_io)
            print_property_table(child.prop2specs.values(), string_io,
                                 deprecated=True)
        child = child.child_binding
        level += 1

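# For reference, the child-binding sections above are titled by nesting
# level (only levels that actually declare properties get a section):
# level 1 is "Child node properties", level 2 is "Grandchild node
# properties", and level 3 and deeper are "Level 3 child node
# properties", "Level 4 child node properties", and so on.
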
def print_property_table(prop_specs, string_io, deprecated=False):
    # Writes a table of properties based on 'prop_specs', an iterable
    # of edtlib.PropertySpec objects, to 'string_io'.
    #
    # If 'deprecated' is true and the property is deprecated, an extra
    # line is printed mentioning that fact. We allow this to be turned
    # off for tables where all properties are deprecated, so it's
    # clear from context.

    # Table header.
    print_block('''\
    .. list-table::
       :widths: 1 1 4
       :header-rows: 1

       * - Name
         - Type
         - Details
    ''', string_io)

    def to_prop_table_row(prop_spec):
        # Get a multiline string for a PropertySpec table row.

        # The description column combines the description field,
        # along with things like the default value or enum values.
        #
        # The property 'description' field from the binding may span
        # one or multiple lines. We try to come up with a nice
        # presentation for each.
        details = ''
        raw_prop_descr = prop_spec.description
        if raw_prop_descr:
            details += to_code_block(raw_prop_descr)

        if prop_spec.required:
            details += '\n\nThis property is **required**.'

        if prop_spec.default:
            details += f'\n\nDefault value: ``{prop_spec.default}``'

        if prop_spec.const:
            details += f'\n\nConstant value: ``{prop_spec.const}``'
        elif prop_spec.enum:
            details += ('\n\nLegal values: ' +
                        ', '.join(f'``{repr(val)}``' for val in
                                  prop_spec.enum))

        if prop_spec.name in DETAILS_IN_IMPORTANT_PROPS:
            details += (f'\n\nSee {zref("dt-important-props")} for more '
                        'information.')

        if deprecated and prop_spec.deprecated:
            details += '\n\nThis property is **deprecated**.'

        return f"""\
   * - ``{prop_spec.name}``
     - ``{prop_spec.type}``
     - {textwrap.indent(details, ' ' * 7).lstrip()}
"""

    # Print each row.
    for prop_spec in prop_specs:
        print(to_prop_table_row(prop_spec), file=string_io)

def setup_compatibles_dir(compatibles, compatibles_dir):
    # Make a set of all the Path objects we will be creating for
    # out_dir / compatibles / {compatible_path}.rst. Delete all the ones that
    # shouldn't be there. Make sure the compatibles output directory
    # exists.

    logger.info('making output subdirectory %s', compatibles_dir)
    compatibles_dir.mkdir(parents=True, exist_ok=True)

    paths = set(compatibles_dir / compatible_filename(compatible)
                for compatible in compatibles)

    for path in compatibles_dir.iterdir():
        if path not in paths:
            logger.info('removing unexpected file %s', path)
            path.unlink()


def print_compatible_disambiguation_page(compatible, bindings, string_io):
    # Print the disambiguation page for 'compatible', which can be
    # handled by any of the bindings in 'bindings', to 'string_io'.

    assert len(bindings) > 1, (compatible, bindings)

    underline = '#' * len(compatible)
    output_list = '\n    '.join(f'- :ref:`{binding_ref_target(binding)}`'
                                for binding in bindings)

    print_block(f'''\
    :orphan:

    .. dtcompatible:: {compatible}

    {compatible}
    {underline}

    The devicetree compatible ``{compatible}`` may be handled by any
    of the following bindings:

    {output_list}
    ''', string_io)

def print_block(block, string_io):
    # Helper for dedenting and printing a triple-quoted RST block.
    # (Just a block of text, not necessarily just a 'code-block'
    # directive.)

    print(textwrap.dedent(block), file=string_io)

def to_code_block(s, indent=0):
    # Converts 's', a string, to an indented rst .. code-block::. The
    # 'indent' argument is a leading indent for each line in the code
    # block, in spaces.
    indent = indent * ' '
    return ('.. code-block:: none\n\n' +
            textwrap.indent(s, indent + '   ') + '\n')

def compatible_vnd(compatible):
    # Get the vendor prefix for a compatible string 'compatible'.
    #
    # For example, compatible_vnd('foo,device') is 'foo'.
    #
    # If 'compatible' has no comma (','), None is returned.

    if ',' not in compatible:
        return None

    return compatible.split(',', 1)[0]

def compatible_filename(compatible):
    # Name of the per-compatible disambiguation page within the
    # out_dir / compatibles directory.

    return f'{compatible}.rst'

def zref(target, text=None):
    # Make an appropriate RST :ref:`text <target>` or :ref:`target`
    # string to a zephyr documentation ref target 'target', and return
    # it.
    #
    # By default, the bindings docs are in the main Zephyr
    # documentation, but this script supports putting them in a
    # separate Sphinx doc set. Since we also link to Zephyr
    # documentation from the generated content, we have an environment
    # variable based escape hatch for putting the target in the zephyr
    # doc set.
    #
    # This relies on intersphinx:
    # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html

    docset = os.environ.get('GEN_DEVICETREE_REST_ZEPHYR_DOCSET', '')

    if docset.strip():
        target = f'{docset}:{target}'

    if text:
        return f':ref:`{text} <{target}>`'

    return f':ref:`{target}`'

def binding_filename(binding):
    # Returns the output file name for a binding relative to the
    # directory containing documentation for all bindings. It does
    # this by stripping off the '.../dts/bindings/' prefix common to
    # all bindings files in a DTS_ROOT directory.
    #
    # For example, for .../zephyr/dts/bindings/base/base.yaml, this
    # would return 'base/base.rst'.
    #
    # Hopefully that's unique across roots. If not, we'll need to
    # update this function.

    as_posix = Path(binding.path).as_posix()
    dts_bindings = 'dts/bindings/'
    idx = as_posix.rfind(dts_bindings)

    if idx == -1:
        raise ValueError(f'binding path has no {dts_bindings}: {binding.path}')

    # Cut past dts/bindings, strip off the extension (.yaml or .yml), and
    # replace it with .rst.
    return os.path.splitext(as_posix[idx + len(dts_bindings):])[0] + '.rst'

def binding_ref_target(binding):
    # Return the sphinx ':ref:' target name for a binding.

    stem = Path(binding.path).stem
    return 'dtbinding_' + re.sub('[/,-]', '_', stem)

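# Illustration with a hypothetical binding path: for a binding loaded
# from /workspace/zephyr/dts/bindings/serial/acme,foo-uart.yaml,
# binding_filename() returns 'serial/acme,foo-uart.rst' and
# binding_ref_target() returns 'dtbinding_acme_foo_uart'.
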
def write_if_updated(path, s):
    # gen_helpers.write_if_updated() wrapper that handles logging and
    # creating missing parents, as needed.

    if not path.parent.is_dir():
        path.parent.mkdir(parents=True)
    written = gen_helpers.write_if_updated(path, s)
    logger.debug('%s %s', 'wrote' if written else 'did NOT write', path)
    return written


if __name__ == '__main__':
    main()
    sys.exit(0)

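# Illustration of zref(): by default zref('dt-bindings') returns
# ':ref:`dt-bindings`' and zref('dt-bindings', 'bindings docs') returns
# ':ref:`bindings docs <dt-bindings>`'. If the
# GEN_DEVICETREE_REST_ZEPHYR_DOCSET environment variable is set to, say,
# 'zephyr' (a hypothetical docset name), the target becomes
# 'zephyr:dt-bindings', which intersphinx resolves in the Zephyr doc set.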