# Copyright (c) 2020 Nordic Semiconductor ASA
# SPDX-License-Identifier: Apache-2.0

"""
Like gen_kconfig_rest.py, but for generating an index of existing
devicetree bindings.
"""

import argparse
import glob
import io
import logging
import os
import pprint
import re
import sys
import textwrap
from collections import defaultdict
from pathlib import Path

import gen_helpers
from devicetree import edtlib

ZEPHYR_BASE = Path(__file__).parents[2]

GENERIC_OR_VENDOR_INDEPENDENT = 'Generic or vendor-independent'
UNKNOWN_VENDOR = 'Unknown vendor'

# Base properties that have documentation in 'dt-important-props'.
DETAILS_IN_IMPORTANT_PROPS = {'compatible', 'label', 'reg', 'status', 'interrupts'}

logger = logging.getLogger('gen_devicetree_rest')

class VndLookup:
    """
    A convenience class for looking up information based on a
    devicetree compatible's vendor prefix 'vnd'.
    """

    def __init__(self, vendor_prefixes, bindings):
        self.vnd2vendor = self.load_vnd2vendor(vendor_prefixes)
        self.vnd2bindings = self.init_vnd2bindings(bindings)
        self.vnd2ref_target = self.init_vnd2ref_target()

    def vendor(self, vnd):
        return self.vnd2vendor.get(vnd, UNKNOWN_VENDOR)

    def bindings(self, vnd, default=None):
        return self.vnd2bindings.get(vnd, default)

    def target(self, vnd):
        return self.vnd2ref_target.get(
            vnd, self.vnd2ref_target[(UNKNOWN_VENDOR,)])

    @staticmethod
    def load_vnd2vendor(vendor_prefixes):
        # Load the vendor-prefixes.txt file. Return a dict mapping 'vnd'
        # vendor prefixes as they are found in compatible properties to
        # each vendor's full name.
        #
        # For example, this line:
        #
        #    vnd    A stand-in for a real vendor
        #
        # gets split into a key 'vnd' and a value 'A stand-in for a real
        # vendor' in the return value.
        #
        # The 'None' key maps to GENERIC_OR_VENDOR_INDEPENDENT.

        vnd2vendor = {
            None: GENERIC_OR_VENDOR_INDEPENDENT,
        }
        vnd2vendor.update(edtlib.load_vendor_prefixes_txt(vendor_prefixes))

        logger.info('found %d vendor prefixes in %s', len(vnd2vendor) - 1,
                    vendor_prefixes)
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2vendor=%s', pprint.pformat(vnd2vendor))

        return vnd2vendor

    def init_vnd2bindings(self, bindings):
        # Take a list of bindings and return a dict mapping 'vnd' vendor
        # prefixes to lists of bindings. The bindings in each list are
        # sorted by compatible. The keys in the return value are sorted
        # by vendor name.
        #
        # Special cases:
        #
        # - The 'None' key maps to bindings with no vendor prefix
        #   in their compatibles, like 'gpio-keys'. This is the first key.
        # - The (UNKNOWN_VENDOR,) key maps to bindings whose compatible
        #   has a vendor prefix that exists, but is not known,
        #   like 'somethingrandom,device'. This is the last key.

        # Get an unsorted dict mapping vendor prefixes to lists of bindings.
        unsorted = defaultdict(list)
        generic_bindings = []
        unknown_vendor_bindings = []
        for binding in bindings:
            vnd = compatible_vnd(binding.compatible)
            if vnd is None:
                generic_bindings.append(binding)
            elif vnd in self.vnd2vendor:
                unsorted[vnd].append(binding)
            else:
                unknown_vendor_bindings.append(binding)
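
        # At this point the intermediate data looks roughly like this
        # (an illustrative sketch, not real output; 'nordic' stands in
        # for any known vendor prefix):
        #
        #   unsorted = {'nordic': [<binding>, <binding>, ...], ...}
        #   generic_bindings = [<binding for 'gpio-keys'>, ...]
        #   unknown_vendor_bindings = [<binding for 'somethingrandom,device'>]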

        # Key functions for sorting.
        def vnd_key(vnd):
            return self.vnd2vendor[vnd].casefold()

        def binding_key(binding):
            return binding.compatible

        # Sort the bindings for each vendor by compatible.
        # Plain dicts preserve insertion order in CPython 3.6+, which is
        # what we support, so the return dict's keys stay in the order we
        # insert them: the generic (None) key first, then known vendors
        # sorted by vendor name, then the unknown-vendor key last.
        #
        # The unknown-vendor bindings being inserted as a 1-tuple key is a
        # hack for convenience that ensures they won't collide with a
        # known vendor. The code that consumes the dict below handles
        # this.
        vnd2bindings = {
            None: sorted(generic_bindings, key=binding_key)
        }
        for vnd in sorted(unsorted, key=vnd_key):
            vnd2bindings[vnd] = sorted(unsorted[vnd], key=binding_key)
        vnd2bindings[(UNKNOWN_VENDOR,)] = sorted(unknown_vendor_bindings,
                                                 key=binding_key)

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2bindings: %s', pprint.pformat(vnd2bindings))

        return vnd2bindings

    def init_vnd2ref_target(self):
        # The return value, vnd2ref_target, is a dict mapping vendor
        # prefixes to ref targets for their relevant sections in this
        # file, with these special cases:
        #
        # - The None key maps to the ref target for bindings with no
        #   vendor prefix in their compatibles, like 'gpio-keys'
        # - The (UNKNOWN_VENDOR,) key maps to the ref target for bindings
        #   whose compatible has a vendor prefix that is not recognized.
        vnd2ref_target = {}

        for vnd in self.vnd2bindings:
            if vnd is None:
                vnd2ref_target[vnd] = 'dt_no_vendor'
            elif isinstance(vnd, str):
                vnd2ref_target[vnd] = f'dt_vendor_{vnd}'
            else:
                assert vnd == (UNKNOWN_VENDOR,), vnd
                vnd2ref_target[vnd] = 'dt_unknown_vendor'

        return vnd2ref_target

def main():
    args = parse_args()
    setup_logging(args.verbose)
    bindings = load_bindings(args.dts_roots, args.dts_folders, args.dts_files)
    base_binding = load_base_binding()
    driver_sources = load_driver_sources()
    vnd_lookup = VndLookup(args.vendor_prefixes, bindings)
    dump_content(bindings, base_binding, vnd_lookup, driver_sources, args.out_dir,
                 args.turbo_mode)

def parse_args():
    # Parse command line arguments from sys.argv.
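    #
    # For reference, an illustrative invocation (the output directory
    # name is made up for this example) could look like:
    #
    #   gen_devicetree_rest.py -v \
    #       --vendor-prefixes dts/bindings/vendor-prefixes.txt \
    #       --dts-root . build/rst/devicetree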

    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument('-v', '--verbose', default=0, action='count',
                        help='increase verbosity; may be given multiple times')
    parser.add_argument('--vendor-prefixes', required=True,
                        help='vendor-prefixes.txt file path')
    parser.add_argument('--dts-root', dest='dts_roots', action='append',
                        help='''additional DTS root directory as it would
                        be set in DTS_ROOTS''')
    parser.add_argument('--dts-folder', dest='dts_folders', action='append', default=[],
                        help='additional DTS folders containing binding files')
    parser.add_argument('--dts-file', dest='dts_files', action='append', default=[],
                        help='additional individual DTS binding files')
    parser.add_argument('--turbo-mode', action='store_true',
                        help='enable turbo mode (dummy references)')
    parser.add_argument('out_dir', help='output files are generated here')

    return parser.parse_args()

def setup_logging(verbose):
    if verbose >= 2:
        log_level = logging.DEBUG
    elif verbose == 1:
        log_level = logging.INFO
    else:
        log_level = logging.ERROR
    logging.basicConfig(format='%(filename)s:%(levelname)s: %(message)s',
                        level=log_level)

def load_bindings(dts_roots, dts_folders, dts_files):
    # Get a list of edtlib.Binding objects by searching 'dts_roots',
    # 'dts_folders', and 'dts_files'.

    if not dts_roots:
        sys.exit('no DTS roots; use --dts-root to specify at least one')

    binding_files = []
    for dts_root in dts_roots:
        binding_files.extend(glob.glob(f'{dts_root}/dts/bindings/**/*.yml',
                                       recursive=True))
        binding_files.extend(glob.glob(f'{dts_root}/dts/bindings/**/*.yaml',
                                       recursive=True))
    for folder in dts_folders:
        binding_files.extend(glob.glob(f'{folder}/*.yml', recursive=False))
        binding_files.extend(glob.glob(f'{folder}/*.yaml', recursive=False))
    binding_files.extend(dts_files)

    bindings = edtlib.bindings_from_paths(binding_files, ignore_errors=True)

    num_total = len(bindings)

    # Remove bindings from the 'vnd' vendor, which is not a real vendor,
    # but rather a stand-in we use for examples and tests when a real
    # vendor would be inappropriate.
    bindings = [binding for binding in bindings if
                compatible_vnd(binding.compatible) != 'vnd']

    logger.info('found %d bindings (ignored %d) in this dts_roots list: %s',
                len(bindings), num_total - len(bindings), dts_roots)

    return bindings

def load_base_binding():
    # Make a Binding object for base.yaml.
    #
    # This helps separate presentation for properties common to all
    # nodes from node-specific properties.
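    #
    # Note: 'base_includes' maps include file names to absolute paths; this
    # is how edtlib resolves files that base.yaml pulls in via 'include:'
    # (pm.yaml is the one provided here).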

    base_yaml = ZEPHYR_BASE / 'dts' / 'bindings' / 'base' / 'base.yaml'
    base_includes = {"pm.yaml": os.fspath(ZEPHYR_BASE / 'dts' / 'bindings' / 'base' / 'pm.yaml')}

    if not base_yaml.is_file():
        sys.exit(f'Expected to find base.yaml at {base_yaml}')
    return edtlib.Binding(os.fspath(base_yaml), base_includes, require_compatible=False,
                          require_description=False)

def load_driver_sources():
    driver_sources = {}
    dt_drv_compat_occurrences = defaultdict(list)

    dt_drv_compat_pattern = re.compile(r"#define DT_DRV_COMPAT\s+(.*)")
    device_dt_inst_define_pattern = re.compile(r"DEVICE_DT_INST_DEFINE")

    folders_to_scan = ["boards", "drivers", "modules", "soc", "subsys"]

    # When looking at folders_to_scan, a file is considered a likely driver source if:
    # - there is one and only one file with a "#define DT_DRV_COMPAT <compatible>" for a given
    #   compatible,
    # - or, a file contains both a "#define DT_DRV_COMPAT <compatible>" and a
    #   DEVICE_DT_INST_DEFINE(...) call.

    for folder in folders_to_scan:
        for dirpath, _, filenames in os.walk(ZEPHYR_BASE / folder):
            for filename in filenames:
                if not filename.endswith(('.c', '.h')):
                    continue
                filepath = Path(dirpath) / filename
                with open(filepath, encoding="utf-8") as f:
                    content = f.read()

                relative_path = filepath.relative_to(ZEPHYR_BASE)

                # Find all DT_DRV_COMPAT occurrences in the file
                dt_drv_compat_matches = dt_drv_compat_pattern.findall(content)
                for compatible in dt_drv_compat_matches:
                    dt_drv_compat_occurrences[compatible].append(relative_path)

                if dt_drv_compat_matches and device_dt_inst_define_pattern.search(content):
                    for compatible in dt_drv_compat_matches:
                        if compatible in driver_sources:
                            # Mark as ambiguous if multiple files define the same compatible
                            driver_sources[compatible] = None
                        else:
                            driver_sources[compatible] = relative_path

    # Remove ambiguous driver sources
    driver_sources = {k: v for k, v in driver_sources.items() if v is not None}

    # Consider DT_DRV_COMPATs with only one occurrence as driver sources
    for compatible, occurrences in dt_drv_compat_occurrences.items():
        if compatible not in driver_sources and len(occurrences) == 1:
            path = occurrences[0]
            # Assume the driver is defined in the enclosing folder if it's a header file
            if path.suffix == ".h":
                path = path.parent
            driver_sources[compatible] = path

    return driver_sources

def dump_content(bindings, base_binding, vnd_lookup, driver_sources, out_dir, turbo_mode):
    # Dump the generated .rst files for a vnd2bindings dict.
    # Files are only written if they are changed. Existing .rst
    # files which would not be written by the 'vnd2bindings'
    # dict are removed.

    out_dir = Path(out_dir)

    setup_bindings_dir(bindings, out_dir)
    if turbo_mode:
        write_dummy_index(bindings, out_dir)
    else:
        write_bindings_rst(vnd_lookup, out_dir)
        write_orphans(bindings, base_binding, vnd_lookup, driver_sources, out_dir)

def setup_bindings_dir(bindings, out_dir):
    # Make a set of all the Path objects we will be creating for
    # out_dir / bindings / {binding_path}.rst. Delete all the ones that
    # shouldn't be there. Make sure the bindings output directory
    # exists.
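    #
    # For example, a binding loaded from <dts root>/dts/bindings/gpio/gpio-keys.yaml
    # (an illustrative path) maps to out_dir / 'bindings' / 'gpio' /
    # 'gpio-keys.rst'; see binding_filename() for the exact rule.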

    paths = set()
    bindings_dir = out_dir / 'bindings'
    logger.info('making output subdirectory %s', bindings_dir)
    bindings_dir.mkdir(parents=True, exist_ok=True)

    for binding in bindings:
        paths.add(bindings_dir / binding_filename(binding))

    for dirpath, _, filenames in os.walk(bindings_dir):
        for filename in filenames:
            path = Path(dirpath) / filename
            if path not in paths:
                logger.info('removing unexpected file %s', path)
                path.unlink()


def write_dummy_index(bindings, out_dir):
    # Write out_dir / bindings.rst, with dummy anchors

    # header
    content = '\n'.join((
        '.. _devicetree_binding_index:',
        '.. _dt_vendor_zephyr:',
        '',
        'Dummy bindings index',
        '####################',
        '',
    ))

    # build compatibles set and dump it
    compatibles = {binding.compatible for binding in bindings}
    content += '\n'.join(
        f'.. dtcompatible:: {compatible}' for compatible in compatibles
    )

    write_if_updated(out_dir / 'bindings.rst', content)


def write_bindings_rst(vnd_lookup, out_dir):
    # Write out_dir / bindings.rst, the top level index of bindings.

    string_io = io.StringIO()

    print_block(f'''\
    .. _devicetree_binding_index:

    Bindings index
    ##############

    This page documents the available devicetree bindings.
    See {zref('dt-bindings')} for an introduction to the Zephyr bindings
    file format.

    Vendor index
    ************

    This section contains an index of hardware vendors.
    Click on a vendor's name to go to the list of bindings for
    that vendor.

    .. rst-class:: rst-columns
    ''', string_io)

    for vnd, bindings in vnd_lookup.vnd2bindings.items():
        if len(bindings) == 0:
            continue
        print(f'- :ref:`{vnd_lookup.target(vnd)}`', file=string_io)

    print_block('''\

    Bindings by vendor
    ******************

    This section contains available bindings, grouped by vendor.
    Within each group, bindings are listed by the "compatible" property
    they apply to, like this:

    **Vendor name (vendor prefix)**

    .. rst-class:: rst-columns

    - <compatible-A>
    - <compatible-B> (on <bus-name> bus)
    - <compatible-C>
    - ...

    The text "(on <bus-name> bus)" appears when bindings may behave
    differently depending on the bus the node appears on.
    For example, this applies to some sensor device nodes, which may
    appear as children of either I2C or SPI bus nodes.
    ''', string_io)

    for vnd, bindings in vnd_lookup.vnd2bindings.items():
        if isinstance(vnd, tuple):
            title = vnd[0]
        else:
            title = vnd_lookup.vendor(vnd).strip()
            if isinstance(vnd, str):
                title += f' ({vnd})'
        underline = '=' * len(title)

        if len(bindings) == 0:
            continue

        print_block(f'''\
        .. _{vnd_lookup.target(vnd)}:

        {title}
        {underline}

        .. rst-class:: rst-columns
        ''', string_io)
        for binding in bindings:
            print(f'- :ref:`{binding_ref_target(binding)}`', file=string_io)
        print(file=string_io)

    write_if_updated(out_dir / 'bindings.rst', string_io.getvalue())

def write_orphans(bindings, base_binding, vnd_lookup, driver_sources, out_dir):
    # Write out_dir / bindings / foo / binding_page.rst for each binding
    # in 'bindings', along with any "disambiguation" pages needed when a
    # single compatible string can be handled by multiple bindings.
    #
    # These files are 'orphans' in the Sphinx sense: they are not in
    # any toctree.

    logging.info('updating :orphan: files for %d bindings', len(bindings))
    num_written = 0

    # First, figure out which compatibles map to multiple bindings. We
    # need this information to decide which of the generated files for
    # a compatible are "disambiguation" pages that point to per-bus
    # binding pages, and which ones aren't.

    compat2bindings = defaultdict(list)
    for binding in bindings:
        compat2bindings[binding.compatible].append(binding)
    dup_compat2bindings = {compatible: bindings for compatible, bindings
                           in compat2bindings.items() if len(bindings) > 1}

    # Next, write the per-binding pages. These contain the
    # per-compatible targets for compatibles not in 'dup_compats'.
    # We'll finish up by writing per-compatible "disambiguation" pages
    # for compatibles in 'dup_compats'.

    # Names of properties in base.yaml.
    base_names = set(base_binding.prop2specs.keys())
    for binding in bindings:
        string_io = io.StringIO()

        print_binding_page(binding, base_names, vnd_lookup,
                           driver_sources, dup_compat2bindings, string_io)

        written = write_if_updated(out_dir / 'bindings' /
                                   binding_filename(binding),
                                   string_io.getvalue())

        if written:
            num_written += 1

    # Generate disambiguation pages for dup_compats.
    compatibles_dir = out_dir / 'compatibles'
    setup_compatibles_dir(dup_compat2bindings.keys(), compatibles_dir)
    for compatible in dup_compat2bindings:
        string_io = io.StringIO()

        print_compatible_disambiguation_page(
            compatible, dup_compat2bindings[compatible], string_io)

        written = write_if_updated(compatibles_dir /
                                   compatible_filename(compatible),
                                   string_io.getvalue())

        if written:
            num_written += 1

    logging.info('done writing :orphan: files; %d files needed updates',
                 num_written)

def make_sidebar(compatible, vendor_name, vendor_ref_target, driver_path=None):
    lines = [
        ".. sidebar:: Overview",
        "",
        f"   :Name: ``{compatible}``",
        f"   :Vendor: :ref:`{vendor_name} <{vendor_ref_target}>`",
        f"   :Used in: :zephyr:board-catalog:`List of boards <#compatibles={compatible}>` using",
        "      this compatible",
    ]
    if driver_path:
        lines.append(f"   :Driver: :zephyr_file:`{driver_path}`")
    return "\n".join(lines) + "\n"

def print_binding_page(binding, base_names, vnd_lookup, driver_sources, dup_compats,
                       string_io):
    # Print the RST content for 'binding' to 'string_io'. The
    # 'dup_compats' argument should support membership testing for
    # compatibles which have multiple associated bindings; if
    # 'binding.compatible' is not in it, then the ref target for the
    # entire compatible is generated in this page as well.

    # :orphan:
    #
    # .. ref_target:
    #
    # Title [(on <bus> bus)]
    # ######################
    if binding.on_bus:
        on_bus_title = f' (on {binding.on_bus} bus)'
    else:
        on_bus_title = ''
    compatible = binding.compatible

    title = f'{compatible}{on_bus_title}'
    underline = '#' * len(title)
    if compatible not in dup_compats:
        # If this binding is the only one that handles this
        # compatible, point the ".. dtcompatible::" directive straight
        # to this page. There's no need for disambiguation.
        dtcompatible = f'.. dtcompatible:: {binding.compatible}'
    else:
        # This compatible is handled by multiple bindings;
        # its ".. dtcompatible::" should be in a disambiguation page
        # instead.
        dtcompatible = ''

    print_block(f'''\
    :orphan:

    .. raw:: html

        <!--
        FIXME: do not limit page width until content uses another representation
        format other than tables
        -->
        <style>.wy-nav-content {{ max-width: none !important; }}</style>

    {dtcompatible}
    .. _{binding_ref_target(binding)}:

    {title}
    {underline}
    ''', string_io)

    vnd = compatible_vnd(compatible)
    vendor_name = vnd_lookup.vendor(vnd)
    vendor_target = vnd_lookup.target(vnd)
    driver_path = driver_sources.get(re.sub("[-,.@/+]", "_", compatible.lower()))

    sidebar_content = make_sidebar(
        compatible=compatible,
        vendor_name=vendor_name,
        vendor_ref_target=vendor_target,
        driver_path=driver_path,
    )
    print_block(sidebar_content, string_io)

    # Binding description.
    if binding.bus:
        bus_help = f'These nodes are "{binding.bus}" bus nodes.'
    else:
        bus_help = ''
    print_block(f'''\
    Description
    ***********

    {bus_help}
    ''', string_io)

    if binding.title:
        description = ("\n\n"
                       .join([binding.title, binding.description])
                       .strip())
    else:
        description = binding.description.strip()
    print(to_code_block(description), file=string_io)

    # Properties.
    print_block('''\
    Properties
    **********
    ''', string_io)
    print_top_level_properties(binding, base_names, string_io)
    print_child_binding_properties(binding, string_io)

    # Specifier cells.
    #
    # This presentation isn't particularly nice. Perhaps something
    # better can be done for future work.
    if binding.specifier2cells:
        print_block('''\
        Specifier cell names
        ********************
        ''', string_io)
        for specifier, cells in binding.specifier2cells.items():
            print(f'- {specifier} cells: {", ".join(cells)}',
                  file=string_io)

def print_top_level_properties(binding, base_names, string_io):
    # Print the RST for top level properties for 'binding' to 'string_io'.
    #
    # The 'base_names' set contains all the base.yaml properties.

    def prop_table(filter_fn, deprecated):
        # Get a properly formatted and indented table of properties.
        specs = [prop_spec for prop_spec in binding.prop2specs.values()
                 if filter_fn(prop_spec)]
        indent = ' ' * 14
        if specs:
            temp_io = io.StringIO()
            print_property_table(specs, temp_io, deprecated=deprecated)
            return textwrap.indent(temp_io.getvalue(), indent)

        return indent + '(None)'

    def node_props_filter(prop_spec):
        return prop_spec.name not in base_names and not prop_spec.deprecated

    def deprecated_node_props_filter(prop_spec):
        return prop_spec.name not in base_names and prop_spec.deprecated

    def base_props_filter(prop_spec):
        return prop_spec.name in base_names

    if binding.child_binding:
        print_block('''\
        Top level properties
        ====================
        ''', string_io)
    if binding.prop2specs:
        if binding.child_binding:
            print_block(f'''
            These property descriptions apply to "{binding.compatible}"
            nodes themselves. This page also describes child node
            properties in the following sections.
            ''', string_io)

        print_block(f'''\
        .. tabs::

           .. group-tab:: Node specific properties

              Properties not inherited from the base binding file.

{prop_table(node_props_filter, False)}

           .. group-tab:: Deprecated node specific properties

              Deprecated properties not inherited from the base binding file.

{prop_table(deprecated_node_props_filter, False)}

           .. group-tab:: Base properties

              Properties inherited from the base binding file, which defines
              common properties that may be set on many nodes. Not all of these
              may apply to the "{binding.compatible}" compatible.

{prop_table(base_props_filter, True)}

        ''', string_io)
    else:
        print('No top-level properties.\n', file=string_io)

def print_child_binding_properties(binding, string_io):
    # Prints property tables for all levels of nesting of child
    # bindings.

    level = 1
    child = binding.child_binding
    while child is not None:
        if level == 1:
            level_string = 'Child'
        elif level == 2:
            level_string = 'Grandchild'
        else:
            level_string = f'Level {level} child'
        if child.prop2specs:
            title = f'{level_string} node properties'
            underline = '=' * len(title)
            print(f'{title}\n{underline}\n', file=string_io)
            print_property_table(child.prop2specs.values(), string_io,
                                 deprecated=True)
        child = child.child_binding
        level += 1

def print_property_table(prop_specs, string_io, deprecated=False):
    # Writes a table of properties based on 'prop_specs', an iterable
    # of edtlib.PropertySpec objects, to 'string_io'.
    #
    # If 'deprecated' is true and the property is deprecated, an extra
    # line is printed mentioning that fact. We allow this to be turned
    # off for tables where all properties are deprecated, so it's
    # clear from context.

    # Table header.
    print_block('''\
    .. list-table::
       :widths: 1 1 4
       :header-rows: 1

       * - Name
         - Type
         - Details
    ''', string_io)

    def to_prop_table_row(prop_spec):
        # Get a multiline string for a PropertySpec table row.

        # The description column combines the description field,
        # along with things like the default value or enum values.
        #
        # The property 'description' field from the binding may span
        # one or multiple lines. We try to come up with a nice
        # presentation for each.
        details = ''
        raw_prop_descr = prop_spec.description
        if raw_prop_descr:
            details += to_code_block(raw_prop_descr)

        if prop_spec.required:
            details += '\n\nThis property is **required**.'

        if prop_spec.default:
            details += f'\n\nDefault value: ``{prop_spec.default}``'

        if prop_spec.const:
            details += f'\n\nConstant value: ``{prop_spec.const}``'
        elif prop_spec.enum:
            details += ('\n\nLegal values: ' +
                        ', '.join(f'``{repr(val)}``' for val in
                                  prop_spec.enum))

        if prop_spec.name in DETAILS_IN_IMPORTANT_PROPS:
            details += (f'\n\nSee {zref("dt-important-props")} for more '
                        'information.')

        if deprecated and prop_spec.deprecated:
            details += '\n\nThis property is **deprecated**.'

        return f"""\
   * - ``{prop_spec.name}``
     - ``{prop_spec.type}``
     - {textwrap.indent(details, ' ' * 7).lstrip()}
"""

    # Print each row.
    for prop_spec in prop_specs:
        print(to_prop_table_row(prop_spec), file=string_io)

def setup_compatibles_dir(compatibles, compatibles_dir):
    # Make a set of all the Path objects we will be creating for
    # out_dir / compatibles / {compatible_path}.rst. Delete all the ones that
    # shouldn't be there. Make sure the compatibles output directory
    # exists.
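    #
    # For example, a hypothetical compatible "foo,uart" handled by more
    # than one binding would get a disambiguation page at
    # compatibles_dir / 'foo,uart.rst'; see compatible_filename().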

    logger.info('making output subdirectory %s', compatibles_dir)
    compatibles_dir.mkdir(parents=True, exist_ok=True)

    paths = set(compatibles_dir / compatible_filename(compatible)
                for compatible in compatibles)

    for path in compatibles_dir.iterdir():
        if path not in paths:
            logger.info('removing unexpected file %s', path)
            path.unlink()


def print_compatible_disambiguation_page(compatible, bindings, string_io):
    # Print the disambiguation page for 'compatible', which can be
    # handled by any of the bindings in 'bindings', to 'string_io'.

    assert len(bindings) > 1, (compatible, bindings)

    underline = '#' * len(compatible)
    output_list = '\n    '.join(f'- :ref:`{binding_ref_target(binding)}`'
                                for binding in bindings)

    print_block(f'''\
    :orphan:

    .. dtcompatible:: {compatible}

    {compatible}
    {underline}

    The devicetree compatible ``{compatible}`` may be handled by any
    of the following bindings:

    {output_list}
    ''', string_io)

def print_block(block, string_io):
    # Helper for dedenting and printing a triple-quoted RST block.
    # (Just a block of text, not necessarily just a 'code-block'
    # directive.)

    print(textwrap.dedent(block), file=string_io)

def to_code_block(s, indent=0):
    # Converts 's', a string, to an indented RST .. code-block::. The
    # 'indent' argument is a leading indent for each line in the code
    # block, in spaces.
    indent = indent * ' '
    return ('.. code-block:: none\n\n' +
            textwrap.indent(s, indent + '   ') + '\n')

def compatible_vnd(compatible):
    # Get the vendor prefix for a compatible string 'compatible'.
    #
    # For example, compatible_vnd('foo,device') is 'foo'.
    #
    # If 'compatible' has no comma (','), None is returned.

    if ',' not in compatible:
        return None

    return compatible.split(',', 1)[0]

def compatible_filename(compatible):
    # Name of the per-compatible disambiguation page within the
    # out_dir / compatibles directory.

    return f'{compatible}.rst'

def zref(target, text=None):
    # Make an appropriate RST :ref:`text <target>` or :ref:`target`
    # string to a Zephyr documentation ref target 'target', and return
    # it.
    #
    # By default, the bindings docs are in the main Zephyr
    # documentation, but this script supports putting them in a
    # separate Sphinx doc set. Since we also link to Zephyr
    # documentation from the generated content, we have an environment
    # variable based escape hatch for putting the target in the Zephyr
    # doc set.
    #
    # This relies on intersphinx:
    # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html

    docset = os.environ.get('GEN_DEVICETREE_REST_ZEPHYR_DOCSET', '')

    if docset.strip():
        target = f'{docset}:{target}'

    if text:
        return f':ref:`{text} <{target}>`'

    return f':ref:`{target}`'

def binding_filename(binding):
    # Returns the output file name for a binding relative to the
    # directory containing documentation for all bindings. It does
    # this by stripping off the '.../dts/bindings/' prefix common to
    # all bindings files in a DTS_ROOT directory.
    #
    # For example, for .../zephyr/dts/bindings/base/base.yaml, this
    # would return 'base/base.rst'.
    #
    # Hopefully that's unique across roots. If not, we'll need to
    # update this function.

    as_posix = Path(binding.path).as_posix()
    dts_bindings = 'dts/bindings/'
    idx = as_posix.rfind(dts_bindings)

    if idx == -1:
        raise ValueError(f'binding path has no {dts_bindings}: {binding.path}')

    # Cut past dts/bindings, strip off the extension (.yaml or .yml), and
    # replace it with .rst.
    return os.path.splitext(as_posix[idx + len(dts_bindings):])[0] + '.rst'

def binding_ref_target(binding):
    # Return the Sphinx ':ref:' target name for a binding.

    stem = Path(binding.path).stem
    return 'dtbinding_' + re.sub('[/,-]', '_', stem)

def write_if_updated(path, s):
    # gen_helpers.write_if_updated() wrapper that handles logging and
    # creating missing parents, as needed.

    if not path.parent.is_dir():
        path.parent.mkdir(parents=True)
    written = gen_helpers.write_if_updated(path, s)
    logger.debug('%s %s', 'wrote' if written else 'did NOT write', path)
    return written


if __name__ == '__main__':
    main()
    sys.exit(0)