# Copyright (c) 2020 Nordic Semiconductor ASA
# SPDX-License-Identifier: Apache-2.0

"""
Like gen_kconfig_rest.py, but for generating an index of existing
devicetree bindings.
"""

import argparse
from collections import defaultdict
import glob
import io
import logging
import os
from pathlib import Path
import pprint
import re
import sys
import textwrap

from devicetree import edtlib

import gen_helpers

ZEPHYR_BASE = Path(__file__).parents[2]

GENERIC_OR_VENDOR_INDEPENDENT = 'Generic or vendor-independent'
UNKNOWN_VENDOR = 'Unknown vendor'

# Base properties that have documentation in 'dt-important-props'.
DETAILS_IN_IMPORTANT_PROPS = set('compatible label reg status interrupts'.split())

logger = logging.getLogger('gen_devicetree_rest')

class VndLookup:
    """
    A convenience class for looking up information based on a
    devicetree compatible's vendor prefix 'vnd'.
    """

    def __init__(self, vendor_prefixes, bindings):
        self.vnd2vendor = self.load_vnd2vendor(vendor_prefixes)
        self.vnd2bindings = self.init_vnd2bindings(bindings)
        self.vnd2ref_target = self.init_vnd2ref_target()

    def vendor(self, vnd):
        return self.vnd2vendor.get(vnd, UNKNOWN_VENDOR)

    def bindings(self, vnd, default=None):
        return self.vnd2bindings.get(vnd, default)

    def target(self, vnd):
        return self.vnd2ref_target.get(
            vnd, self.vnd2ref_target[(UNKNOWN_VENDOR,)])

    @staticmethod
    def load_vnd2vendor(vendor_prefixes):
        # Load the vendor-prefixes.txt file. Return a dict mapping 'vnd'
        # vendor prefixes as they are found in compatible properties to
        # each vendor's full name.
        #
        # For example, this line:
        #
        #    vnd    A stand-in for a real vendor
        #
        # gets split into a key 'vnd' and a value 'A stand-in for a real
        # vendor' in the return value.
        #
        # The 'None' key maps to GENERIC_OR_VENDOR_INDEPENDENT.

        vnd2vendor = {
            None: GENERIC_OR_VENDOR_INDEPENDENT,
        }
        vnd2vendor.update(edtlib.load_vendor_prefixes_txt(vendor_prefixes))

        logger.info('found %d vendor prefixes in %s', len(vnd2vendor) - 1,
                    vendor_prefixes)
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2vendor=%s', pprint.pformat(vnd2vendor))

        return vnd2vendor

    def init_vnd2bindings(self, bindings):
        # Take a list of bindings and return a dict mapping 'vnd'
        # vendor prefixes to lists of bindings. The bindings in each
        # list are sorted by compatible. The keys in the return value
        # are sorted by vendor name.
        #
        # Special cases:
        #
        # - The 'None' key maps to bindings with no vendor prefix
        #   in their compatibles, like 'gpio-keys'. This is the first key.
        # - The (UNKNOWN_VENDOR,) key maps to bindings whose compatible
        #   has a vendor prefix that exists, but is not known,
        #   like 'somethingrandom,device'. This is the last key.

        # Get an unsorted dict mapping vendor prefixes to lists of bindings.
        unsorted = defaultdict(list)
        generic_bindings = []
        unknown_vendor_bindings = []
        for binding in bindings:
            vnd = compatible_vnd(binding.compatible)
            if vnd is None:
                generic_bindings.append(binding)
            elif vnd in self.vnd2vendor:
                unsorted[vnd].append(binding)
            else:
                unknown_vendor_bindings.append(binding)

        # Key functions for sorting.
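        #
        # vnd_key orders known vendors by their human-readable vendor
        # name (case-insensitively); binding_key orders bindings by
        # their compatible string.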
        def vnd_key(vnd):
            return self.vnd2vendor[vnd].casefold()

        def binding_key(binding):
            return binding.compatible

        # Sort the bindings for each vendor by compatible.
        # Plain dicts preserve insertion order in CPython 3.6+, which is
        # what we support, so the return dict's keys stay in the order
        # they are inserted below: generic bindings first, then known
        # vendors sorted by vendor name, then unknown-vendor bindings last.
        #
        # The unknown-vendor bindings being inserted as a 1-tuple key is a
        # hack for convenience that ensures they won't collide with a
        # known vendor. The code that consumes the dict below handles
        # this.
        vnd2bindings = {
            None: sorted(generic_bindings, key=binding_key)
        }
        for vnd in sorted(unsorted, key=vnd_key):
            vnd2bindings[vnd] = sorted(unsorted[vnd], key=binding_key)
        vnd2bindings[(UNKNOWN_VENDOR,)] = sorted(unknown_vendor_bindings,
                                                 key=binding_key)

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2bindings: %s', pprint.pformat(vnd2bindings))

        return vnd2bindings

    def init_vnd2ref_target(self):
        # The return value, vnd2ref_target, is a dict mapping vendor
        # prefixes to ref targets for their relevant sections in this
        # file, with these special cases:
        #
        # - The None key maps to the ref target for bindings with no
        #   vendor prefix in their compatibles, like 'gpio-keys'
        # - The (UNKNOWN_VENDOR,) key maps to the ref target for bindings
        #   whose compatible has a vendor prefix that is not recognized.
        vnd2ref_target = {}

        for vnd in self.vnd2bindings:
            if vnd is None:
                vnd2ref_target[vnd] = 'dt_no_vendor'
            elif isinstance(vnd, str):
                vnd2ref_target[vnd] = f'dt_vendor_{vnd}'
            else:
                assert vnd == (UNKNOWN_VENDOR,), vnd
                vnd2ref_target[vnd] = 'dt_unknown_vendor'

        return vnd2ref_target

def main():
    args = parse_args()
    setup_logging(args.verbose)
    bindings = load_bindings(args.dts_roots, args.dts_folders)
    base_binding = load_base_binding()
    vnd_lookup = VndLookup(args.vendor_prefixes, bindings)
    dump_content(bindings, base_binding, vnd_lookup, args.out_dir,
                 args.turbo_mode)

def parse_args():
    # Parse command line arguments from sys.argv.

    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument('-v', '--verbose', default=0, action='count',
                        help='increase verbosity; may be given multiple times')
    parser.add_argument('--vendor-prefixes', required=True,
                        help='vendor-prefixes.txt file path')
    parser.add_argument('--dts-root', dest='dts_roots', action='append',
                        help='''additional DTS root directory as it would
                        be set in DTS_ROOTS''')
    parser.add_argument('--dts-folder', dest='dts_folders', action='append',
                        default=[],
                        help='additional DTS folders containing binding files')
    parser.add_argument('--turbo-mode', action='store_true',
                        help='Enable turbo mode (dummy references)')
    parser.add_argument('out_dir', help='output files are generated here')

    return parser.parse_args()

def setup_logging(verbose):
    if verbose >= 2:
        log_level = logging.DEBUG
    elif verbose == 1:
        log_level = logging.INFO
    else:
        log_level = logging.ERROR
    logging.basicConfig(format='%(filename)s:%(levelname)s: %(message)s',
                        level=log_level)

def load_bindings(dts_roots, dts_folders):
    # Get a list of edtlib.Binding objects from searching 'dts_roots'
    # and 'dts_folders'.
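    #
    # As an illustration (the paths are hypothetical), a --dts-root of
    # /workspace/zephyr makes the recursive globs below pick up binding
    # files such as /workspace/zephyr/dts/bindings/serial/ns16550.yaml.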

    if not dts_roots:
        sys.exit('no DTS roots; use --dts-root to specify at least one')

    binding_files = []
    for dts_root in dts_roots:
        binding_files.extend(glob.glob(f'{dts_root}/dts/bindings/**/*.yml',
                                       recursive=True))
        binding_files.extend(glob.glob(f'{dts_root}/dts/bindings/**/*.yaml',
                                       recursive=True))
    for folders in dts_folders:
        binding_files.extend(glob.glob(f'{folders}/*.yml', recursive=False))
        binding_files.extend(glob.glob(f'{folders}/*.yaml', recursive=False))

    bindings = edtlib.bindings_from_paths(binding_files, ignore_errors=True)

    num_total = len(bindings)

    # Remove bindings from the 'vnd' vendor, which is not a real vendor,
    # but rather a stand-in we use for examples and tests when a real
    # vendor would be inappropriate.
    bindings = [binding for binding in bindings if
                compatible_vnd(binding.compatible) != 'vnd']

    logger.info('found %d bindings (ignored %d) in this dts_roots list: %s',
                len(bindings), num_total - len(bindings), dts_roots)

    return bindings

def load_base_binding():
    # Make a Binding object for base.yaml.
    #
    # This helps separate presentation for properties common to all
    # nodes from node-specific properties.

    base_yaml = ZEPHYR_BASE / 'dts' / 'bindings' / 'base' / 'base.yaml'
    base_includes = {"pm.yaml": os.fspath(ZEPHYR_BASE / 'dts' / 'bindings' / 'base' / 'pm.yaml')}

    if not base_yaml.is_file():
        sys.exit(f'Expected to find base.yaml at {base_yaml}')
    return edtlib.Binding(os.fspath(base_yaml), base_includes,
                          require_compatible=False, require_description=False)

def dump_content(bindings, base_binding, vnd_lookup, out_dir, turbo_mode):
    # Dump the generated .rst files for a vnd2bindings dict.
    # Files are only written if they are changed. Existing .rst
    # files which would not be written by the 'vnd2bindings'
    # dict are removed.

    out_dir = Path(out_dir)

    setup_bindings_dir(bindings, out_dir)
    if turbo_mode:
        write_dummy_index(bindings, out_dir)
    else:
        write_bindings_rst(vnd_lookup, out_dir)
        write_orphans(bindings, base_binding, vnd_lookup, out_dir)

def setup_bindings_dir(bindings, out_dir):
    # Make a set of all the Path objects we will be creating for
    # out_dir / bindings / {binding_path}.rst. Delete all the ones that
    # shouldn't be there. Make sure the bindings output directory
    # exists.

    paths = set()
    bindings_dir = out_dir / 'bindings'
    logger.info('making output subdirectory %s', bindings_dir)
    bindings_dir.mkdir(parents=True, exist_ok=True)

    for binding in bindings:
        paths.add(bindings_dir / binding_filename(binding))

    for dirpath, _, filenames in os.walk(bindings_dir):
        for filename in filenames:
            path = Path(dirpath) / filename
            if path not in paths:
                logger.info('removing unexpected file %s', path)
                path.unlink()


def write_dummy_index(bindings, out_dir):
    # Write out_dir / bindings.rst, with dummy anchors

    # header
    content = '\n'.join((
        '.. _devicetree_binding_index:',
        '.. _dt_vendor_zephyr:',
        '',
        'Dummy bindings index',
        '####################',
        '',
    ))

    # build compatibles set and dump it
    compatibles = {binding.compatible for binding in bindings}
    content += '\n'.join((
        f'.. dtcompatible:: {compatible}' for compatible in compatibles
    ))

    write_if_updated(out_dir / 'bindings.rst', content)


def write_bindings_rst(vnd_lookup, out_dir):
    # Write out_dir / bindings.rst, the top level index of bindings.

    string_io = io.StringIO()

    print_block(f'''\
    .. _devicetree_binding_index:

    Bindings index
    ##############

    This page documents the available devicetree bindings.
    See {zref('dt-bindings')} for an introduction to the Zephyr bindings
    file format.

    Vendor index
    ************

    This section contains an index of hardware vendors.
    Click on a vendor's name to go to the list of bindings for
    that vendor.

    .. rst-class:: rst-columns
    ''', string_io)

    for vnd, bindings in vnd_lookup.vnd2bindings.items():
        if len(bindings) == 0:
            continue
        print(f'- :ref:`{vnd_lookup.target(vnd)}`', file=string_io)

    print_block('''\

    Bindings by vendor
    ******************

    This section contains available bindings, grouped by vendor.
    Within each group, bindings are listed by the "compatible" property
    they apply to, like this:

    **Vendor name (vendor prefix)**

    .. rst-class:: rst-columns

    - <compatible-A>
    - <compatible-B> (on <bus-name> bus)
    - <compatible-C>
    - ...

    The text "(on <bus-name> bus)" appears when bindings may behave
    differently depending on the bus the node appears on.
    For example, this applies to some sensor device nodes, which may
    appear as children of either I2C or SPI bus nodes.
    ''', string_io)

    for vnd, bindings in vnd_lookup.vnd2bindings.items():
        if isinstance(vnd, tuple):
            title = vnd[0]
        else:
            title = vnd_lookup.vendor(vnd).strip()
            if isinstance(vnd, str):
                title += f' ({vnd})'
        underline = '=' * len(title)

        if len(bindings) == 0:
            continue

        print_block(f'''\
        .. _{vnd_lookup.target(vnd)}:

        {title}
        {underline}

        .. rst-class:: rst-columns
        ''', string_io)
        for binding in bindings:
            print(f'- :ref:`{binding_ref_target(binding)}`', file=string_io)
        print(file=string_io)

    write_if_updated(out_dir / 'bindings.rst', string_io.getvalue())

def write_orphans(bindings, base_binding, vnd_lookup, out_dir):
    # Write out_dir / bindings / foo / binding_page.rst for each binding
    # in 'bindings', along with any "disambiguation" pages needed when a
    # single compatible string can be handled by multiple bindings.
    #
    # These files are 'orphans' in the Sphinx sense: they are not in
    # any toctree.

    logger.info('updating :orphan: files for %d bindings', len(bindings))
    num_written = 0

    # First, figure out which compatibles map to multiple bindings. We
    # need this information to decide which of the generated files for
    # a compatible are "disambiguation" pages that point to per-bus
    # binding pages, and which ones aren't.

    compat2bindings = defaultdict(list)
    for binding in bindings:
        compat2bindings[binding.compatible].append(binding)
    dup_compat2bindings = {compatible: bindings for compatible, bindings
                           in compat2bindings.items() if len(bindings) > 1}

    # Next, write the per-binding pages. These contain the
    # per-compatible targets for compatibles not in 'dup_compats'.
    # We'll finish up by writing per-compatible "disambiguation" pages
    # for compatibles in 'dup_compats'.

    # Names of properties in base.yaml.
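    # (Illustrative: the exact contents are whatever base.yaml declares,
    # typically including names like 'status', 'compatible', and 'reg'.)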
    base_names = set(base_binding.prop2specs.keys())
    for binding in bindings:
        string_io = io.StringIO()

        print_binding_page(binding, base_names, vnd_lookup,
                           dup_compat2bindings, string_io)

        written = write_if_updated(out_dir / 'bindings' /
                                   binding_filename(binding),
                                   string_io.getvalue())

        if written:
            num_written += 1

    # Generate disambiguation pages for dup_compats.
    compatibles_dir = out_dir / 'compatibles'
    setup_compatibles_dir(dup_compat2bindings.keys(), compatibles_dir)
    for compatible in dup_compat2bindings:
        string_io = io.StringIO()

        print_compatible_disambiguation_page(
            compatible, dup_compat2bindings[compatible], string_io)

        written = write_if_updated(compatibles_dir /
                                   compatible_filename(compatible),
                                   string_io.getvalue())

        if written:
            num_written += 1

    logger.info('done writing :orphan: files; %d files needed updates',
                num_written)

def print_binding_page(binding, base_names, vnd_lookup, dup_compats,
                       string_io):
    # Print the rst content for 'binding' to 'string_io'. The
    # 'dup_compats' argument should support membership testing for
    # compatibles which have multiple associated bindings; if
    # 'binding.compatible' is not in it, then the ref target for the
    # entire compatible is generated in this page as well.

    # :orphan:
    #
    # .. ref_target:
    #
    # Title [(on <bus> bus)]
    # ######################
    if binding.on_bus:
        on_bus_title = f' (on {binding.on_bus} bus)'
    else:
        on_bus_title = ''
    compatible = binding.compatible

    title = f'{compatible}{on_bus_title}'
    underline = '#' * len(title)
    if compatible not in dup_compats:
        # If this binding is the only one that handles this
        # compatible, point the ".. dtcompatible:" directive straight
        # to this page. There's no need for disambiguation.
        dtcompatible = f'.. dtcompatible:: {binding.compatible}'
    else:
        # This compatible is handled by multiple bindings;
        # its ".. dtcompatible::" should be in a disambiguation page
        # instead.
        dtcompatible = ''

    print_block(f'''\
    :orphan:

    .. raw:: html

        <!--
        FIXME: do not limit page width until content uses another
        representation format than tables
        -->
        <style>.wy-nav-content {{ max-width: none !important; }}</style>

    {dtcompatible}
    .. _{binding_ref_target(binding)}:

    {title}
    {underline}
    ''', string_io)

    # Vendor: <link-to-vendor-section>
    vnd = compatible_vnd(compatible)
    print('Vendor: '
          f':ref:`{vnd_lookup.vendor(vnd)} <{vnd_lookup.target(vnd)}>`\n',
          file=string_io)

    # Binding description.
    if binding.bus:
        bus_help = f'These nodes are "{binding.bus}" bus nodes.'
    else:
        bus_help = ''
    print_block(f'''\
    Description
    ***********

    {bus_help}
    ''', string_io)
    print(to_code_block(binding.description.strip()), file=string_io)

    # Properties.
    print_block('''\
    Properties
    **********
    ''', string_io)
    print_top_level_properties(binding, base_names, string_io)
    print_child_binding_properties(binding, string_io)

    # Specifier cells.
    #
    # This presentation isn't particularly nice. Perhaps something
    # better can be done for future work.
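    #
    # Binding.specifier2cells maps specifier space names to lists of
    # cell names, e.g. something like {'gpio': ['pin', 'flags']} for a
    # typical GPIO controller binding (illustrative example).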
    if binding.specifier2cells:
        print_block('''\
        Specifier cell names
        ********************
        ''', string_io)
        for specifier, cells in binding.specifier2cells.items():
            print(f'- {specifier} cells: {", ".join(cells)}',
                  file=string_io)

def print_top_level_properties(binding, base_names, string_io):
    # Print the RST for top level properties for 'binding' to 'string_io'.
    #
    # The 'base_names' set contains all the base.yaml properties.

    def prop_table(filter_fn, deprecated):
        # Get a properly formatted and indented table of properties.
        specs = [prop_spec for prop_spec in binding.prop2specs.values()
                 if filter_fn(prop_spec)]
        indent = ' ' * 14
        if specs:
            temp_io = io.StringIO()
            print_property_table(specs, temp_io, deprecated=deprecated)
            return textwrap.indent(temp_io.getvalue(), indent)

        return indent + '(None)'

    def node_props_filter(prop_spec):
        return prop_spec.name not in base_names and not prop_spec.deprecated

    def deprecated_node_props_filter(prop_spec):
        return prop_spec.name not in base_names and prop_spec.deprecated

    def base_props_filter(prop_spec):
        return prop_spec.name in base_names

    if binding.child_binding:
        print_block('''\
        Top level properties
        ====================
        ''', string_io)
    if binding.prop2specs:
        if binding.child_binding:
            print_block(f'''
            These property descriptions apply to "{binding.compatible}"
            nodes themselves. This page also describes child node
            properties in the following sections.
            ''', string_io)


        print_block(f'''\
        .. tabs::

           .. group-tab:: Node specific properties

              Properties not inherited from the base binding file.

{prop_table(node_props_filter, False)}

           .. group-tab:: Deprecated node specific properties

              Deprecated properties not inherited from the base binding file.

{prop_table(deprecated_node_props_filter, False)}

           .. group-tab:: Base properties

              Properties inherited from the base binding file, which defines
              common properties that may be set on many nodes. Not all of these
              may apply to the "{binding.compatible}" compatible.

{prop_table(base_props_filter, True)}

        ''', string_io)
    else:
        print('No top-level properties.\n', file=string_io)

def print_child_binding_properties(binding, string_io):
    # Prints property tables for all levels of nesting of child
    # bindings.

    level = 1
    child = binding.child_binding
    while child is not None:
        if level == 1:
            level_string = 'Child'
        elif level == 2:
            level_string = 'Grandchild'
        else:
            level_string = f'Level {level} child'
        if child.prop2specs:
            title = f'{level_string} node properties'
            underline = '=' * len(title)
            print(f'{title}\n{underline}\n', file=string_io)
            print_property_table(child.prop2specs.values(), string_io,
                                 deprecated=True)
        child = child.child_binding
        level += 1

def print_property_table(prop_specs, string_io, deprecated=False):
    # Writes a table of properties based on 'prop_specs', an iterable
    # of edtlib.PropertySpec objects, to 'string_io'.
    #
    # If 'deprecated' is true and the property is deprecated, an extra
    # line is printed mentioning that fact. We allow this to be turned
    # off for tables where all properties are deprecated, so it's
    # clear from context.

    # Table header.
    print_block('''\
    .. list-table::
       :widths: 1 1 4
       :header-rows: 1

       * - Name
         - Type
         - Details
    ''', string_io)

    def to_prop_table_row(prop_spec):
        # Get a multiline string for a PropertySpec table row.

        # The description column combines the description field,
        # along with things like the default value or enum values.
        #
        # The property 'description' field from the binding may span
        # one or multiple lines. We try to come up with a nice
        # presentation for each.
        details = ''
        raw_prop_descr = prop_spec.description
        if raw_prop_descr:
            details += to_code_block(raw_prop_descr)

        if prop_spec.required:
            details += '\n\nThis property is **required**.'

        if prop_spec.default:
            details += f'\n\nDefault value: ``{prop_spec.default}``'

        if prop_spec.const:
            details += f'\n\nConstant value: ``{prop_spec.const}``'
        elif prop_spec.enum:
            details += ('\n\nLegal values: ' +
                        ', '.join(f'``{repr(val)}``' for val in
                                  prop_spec.enum))

        if prop_spec.name in DETAILS_IN_IMPORTANT_PROPS:
            details += (f'\n\nSee {zref("dt-important-props")} for more '
                        'information.')

        if deprecated and prop_spec.deprecated:
            details += '\n\nThis property is **deprecated**.'

        return f"""\
   * - ``{prop_spec.name}``
     - ``{prop_spec.type}``
     - {textwrap.indent(details, ' ' * 7).lstrip()}
"""

    # Print each row.
    for prop_spec in prop_specs:
        print(to_prop_table_row(prop_spec), file=string_io)

def setup_compatibles_dir(compatibles, compatibles_dir):
    # Make a set of all the Path objects we will be creating for
    # out_dir / compatibles / {compatible_path}.rst. Delete all the ones that
    # shouldn't be there. Make sure the compatibles output directory
    # exists.

    logger.info('making output subdirectory %s', compatibles_dir)
    compatibles_dir.mkdir(parents=True, exist_ok=True)

    paths = set(compatibles_dir / compatible_filename(compatible)
                for compatible in compatibles)

    for path in compatibles_dir.iterdir():
        if path not in paths:
            logger.info('removing unexpected file %s', path)
            path.unlink()


def print_compatible_disambiguation_page(compatible, bindings, string_io):
    # Print the disambiguation page for 'compatible', which can be
    # handled by any of the bindings in 'bindings', to 'string_io'.

    assert len(bindings) > 1, (compatible, bindings)

    underline = '#' * len(compatible)
    output_list = '\n    '.join(f'- :ref:`{binding_ref_target(binding)}`'
                                for binding in bindings)

    print_block(f'''\
    :orphan:

    .. dtcompatible:: {compatible}

    {compatible}
    {underline}

    The devicetree compatible ``{compatible}`` may be handled by any
    of the following bindings:

    {output_list}
    ''', string_io)

def print_block(block, string_io):
    # Helper for dedenting and printing a triple-quoted RST block.
    # (Just a block of text, not necessarily just a 'code-block'
    # directive.)

    print(textwrap.dedent(block), file=string_io)

def to_code_block(s, indent=0):
    # Converts 's', a string, to an indented rst .. code-block::. The
    # 'indent' argument is a leading indent for each line in the code
    # block, in spaces.
    indent = indent * ' '
    return ('.. code-block:: none\n\n' +
            textwrap.indent(s, indent + '   ') + '\n')

def compatible_vnd(compatible):
    # Get the vendor prefix for a compatible string 'compatible'.
    #
    # For example, compatible_vnd('foo,device') is 'foo'.
    #
    # If 'compatible' has no comma (','), None is returned.

    if ',' not in compatible:
        return None

    return compatible.split(',', 1)[0]

def compatible_filename(compatible):
    # Name of the per-compatible disambiguation page within the
    # out_dir / compatibles directory.

    return f'{compatible}.rst'

def zref(target, text=None):
    # Make an appropriate RST :ref:`text <target>` or :ref:`target`
    # string to a zephyr documentation ref target 'target', and return
    # it.
    #
    # By default, the bindings docs are in the main Zephyr
    # documentation, but this script supports putting them in a
    # separate Sphinx doc set. Since we also link to Zephyr
    # documentation from the generated content, we have an environment
    # variable based escape hatch for putting the target in the zephyr
    # doc set.
    #
    # This relies on intersphinx:
    # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html

    docset = os.environ.get('GEN_DEVICETREE_REST_ZEPHYR_DOCSET', '')

    if docset.strip():
        target = f'{docset}:{target}'

    if text:
        return f':ref:`{text} <{target}>`'

    return f':ref:`{target}`'

def binding_filename(binding):
    # Returns the output file name for a binding relative to the
    # directory containing documentation for all bindings. It does
    # this by stripping off the '.../dts/bindings/' prefix common to
    # all bindings files in a DTS_ROOT directory.
    #
    # For example, for .../zephyr/dts/bindings/base/base.yaml, this
    # would return 'base/base.rst'.
    #
    # Hopefully that's unique across roots. If not, we'll need to
    # update this function.

    as_posix = Path(binding.path).as_posix()
    dts_bindings = 'dts/bindings/'
    idx = as_posix.rfind(dts_bindings)

    if idx == -1:
        raise ValueError(f'binding path has no {dts_bindings}: {binding.path}')

    # Cut past dts/bindings, strip off the extension (.yaml or .yml), and
    # replace it with .rst.
    return os.path.splitext(as_posix[idx + len(dts_bindings):])[0] + '.rst'

def binding_ref_target(binding):
    # Return the sphinx ':ref:' target name for a binding.

    stem = Path(binding.path).stem
    return 'dtbinding_' + re.sub('[/,-]', '_', stem)

def write_if_updated(path, s):
    # gen_helpers.write_if_updated() wrapper that handles logging and
    # creating missing parents, as needed.

    if not path.parent.is_dir():
        path.parent.mkdir(parents=True)
    written = gen_helpers.write_if_updated(path, s)
    logger.debug('%s %s', 'wrote' if written else 'did NOT write', path)
    return written


if __name__ == '__main__':
    main()
    sys.exit(0)
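
# Example invocation (illustrative only; in the Zephyr tree this script
# is normally driven by the documentation build rather than run by hand):
#
#   python3 gen_devicetree_rest.py \
#       --vendor-prefixes dts/bindings/vendor-prefixes.txt \
#       --dts-root . \
#       <out_dir>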