#!/usr/bin/env python3

# Copyright (c) 2019 - 2020 Nordic Semiconductor ASA
# Copyright (c) 2019 Linaro Limited
# SPDX-License-Identifier: BSD-3-Clause

# This script uses edtlib to generate a header file from a devicetree
# (.dts) file. Information from binding files in YAML format is used
# as well.
#
# Bindings are files that describe devicetree nodes. Devicetree nodes are
# usually mapped to bindings via their 'compatible = "..."' property.
#
# See Zephyr's Devicetree user guide for details.
#
# Note: Do not access private (_-prefixed) identifiers from edtlib here (and
# also note that edtlib is not meant to expose the dtlib API directly).
# Instead, think of what API you need, and add it as a public documented API in
# edtlib. This will keep this script simple.

import argparse
from collections import defaultdict
import logging
import os
import pathlib
import pickle
import re
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), 'python-devicetree',
                             'src'))

from devicetree import edtlib

# The set of binding types whose values can be iterated over with
# DT_FOREACH_PROP_ELEM(). If you change this, make sure to update the
# doxygen string for that macro.
FOREACH_PROP_ELEM_TYPES = set(['string', 'array', 'uint8-array', 'string-array',
                               'phandles', 'phandle-array'])

class LogFormatter(logging.Formatter):
    '''A log formatter that prints the level name in lower case,
    for compatibility with earlier versions of edtlib.'''

    def __init__(self):
        super().__init__(fmt='%(levelnamelower)s: %(message)s')

    def format(self, record):
        record.levelnamelower = record.levelname.lower()
        return super().format(record)

def main():
    global header_file
    global flash_area_num

    args = parse_args()

    setup_edtlib_logging()

    vendor_prefixes = {}
    for prefixes_file in args.vendor_prefixes:
        vendor_prefixes.update(edtlib.load_vendor_prefixes_txt(prefixes_file))

    try:
        edt = edtlib.EDT(args.dts, args.bindings_dirs,
                         # Suppress this warning if it's suppressed in dtc
                         warn_reg_unit_address_mismatch=
                             "-Wno-simple_bus_reg" not in args.dtc_flags,
                         default_prop_types=True,
                         infer_binding_for_paths=["/zephyr,user"],
                         werror=args.edtlib_Werror,
                         vendor_prefixes=vendor_prefixes)
    except edtlib.EDTError as e:
        sys.exit(f"devicetree error: {e}")

    flash_area_num = 0

    # Save merged DTS source, as a debugging aid
    with open(args.dts_out, "w", encoding="utf-8") as f:
        print(edt.dts_source, file=f)

    # The raw index into edt.compat2nodes[compat] is used for node
    # instance numbering within a compatible.
    #
    # As a way to satisfy people's intuitions about instance numbers,
    # though, we sort this list so enabled instances come first.
    #
    # This might look like a hack, but it keeps drivers and
    # applications which don't use instance numbers carefully working
    # as expected, since e.g. instance number 0 is always the
    # singleton instance if there's just one enabled node of a
    # particular compatible.
    #
    # This doesn't violate any devicetree.h API guarantees about
    # instance ordering, since we make no promises that instance
    # numbers are stable across builds.
    for compat, nodes in edt.compat2nodes.items():
        edt.compat2nodes[compat] = sorted(
            nodes, key=lambda node: 0 if node.status == "okay" else 1)

    # Create the generated header.
    with open(args.header_out, "w", encoding="utf-8") as header_file:
        write_top_comment(edt)

        # populate all z_path_id first so any children references will
        # work correctly.
        for node in sorted(edt.nodes, key=lambda node: node.dep_ordinal):
            node.z_path_id = node_z_path_id(node)

        for node in sorted(edt.nodes, key=lambda node: node.dep_ordinal):
            write_node_comment(node)

            out_comment("Node's full path:")
            out_dt_define(f"{node.z_path_id}_PATH", f'"{escape(node.path)}"')

            out_comment("Node's name with unit-address:")
            out_dt_define(f"{node.z_path_id}_FULL_NAME",
                          f'"{escape(node.name)}"')

            if node.parent is not None:
                out_comment(f"Node parent ({node.parent.path}) identifier:")
                out_dt_define(f"{node.z_path_id}_PARENT",
                              f"DT_{node.parent.z_path_id}")

            write_child_functions(node)
            write_child_functions_status_okay(node)
            write_dep_info(node)
            write_idents_and_existence(node)
            write_bus(node)
            write_special_props(node)
            write_vanilla_props(node)

        write_chosen(edt)
        write_global_compat_info(edt)

    write_device_extern_header(args.device_header_out, edt)

    if args.edt_pickle_out:
        write_pickled_edt(edt, args.edt_pickle_out)

def write_device_extern_header(device_header_out, edt):
    # Generate a header with extern declarations for the devicetree's
    # struct devices.

    with open(device_header_out, "w", encoding="utf-8") as dev_header_file:
        print("#ifndef DEVICE_EXTERN_GEN_H", file=dev_header_file)
        print("#define DEVICE_EXTERN_GEN_H", file=dev_header_file)
        print("", file=dev_header_file)

        print("#ifdef __cplusplus", file=dev_header_file)
        print('extern "C" {', file=dev_header_file)
        print("#endif", file=dev_header_file)
        print("", file=dev_header_file)

        for node in sorted(edt.nodes, key=lambda node: node.dep_ordinal):
            print(f"extern const struct device DEVICE_DT_NAME_GET(DT_{node.z_path_id}); /* dts_ord_{node.dep_ordinal} */",
                  file=dev_header_file)

        print("", file=dev_header_file)
        print("#ifdef __cplusplus", file=dev_header_file)
        print("}", file=dev_header_file)
        print("#endif", file=dev_header_file)
        print("", file=dev_header_file)
        print("#endif /* DEVICE_EXTERN_GEN_H */", file=dev_header_file)

def setup_edtlib_logging():
    # The edtlib module emits logs using the standard 'logging' module.
    # Configure it so that warnings and above are printed to stderr,
    # using the LogFormatter class defined above to format each message.

    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(LogFormatter())

    logger = logging.getLogger('edtlib')
    logger.setLevel(logging.WARNING)
    logger.addHandler(handler)

def node_z_path_id(node):
    # Return the node specific bit of the node's path identifier:
    #
    # - the root node's path "/" has path identifier "N"
    # - "/foo" has "N_S_foo"
    # - "/foo/bar" has "N_S_foo_S_bar"
    # - "/foo/bar@123" has "N_S_foo_S_bar_123"
    #
    # This is used throughout this file to generate macros related to
    # the node.

    components = ["N"]
    if node.parent is not None:
        components.extend(f"S_{str2ident(component)}" for component in
                          node.path.split("/")[1:])

    return "_".join(components)

def parse_args():
    # Returns parsed command-line arguments

    parser = argparse.ArgumentParser()
    parser.add_argument("--dts", required=True, help="DTS file")
    parser.add_argument("--dtc-flags",
                        help="'dtc' devicetree compiler flags, some of which "
                             "might be respected here")
    parser.add_argument("--bindings-dirs", nargs='+', required=True,
                        help="directory with bindings in YAML format, "
                             "we allow multiple")
    parser.add_argument("--header-out", required=True,
                        help="path to write header to")
    parser.add_argument("--dts-out", required=True,
                        help="path to write merged DTS source code to (e.g. "
                             "as a debugging aid)")
    parser.add_argument("--device-header-out", required=True,
                        help="path to write device struct extern header to")
    parser.add_argument("--edt-pickle-out",
                        help="path to write pickled edtlib.EDT object to")
    parser.add_argument("--vendor-prefixes", action='append', default=[],
                        help="vendor-prefixes.txt path; used for validation; "
                             "may be given multiple times")
    parser.add_argument("--edtlib-Werror", action="store_true",
                        help="if set, edtlib-specific warnings become errors. "
                             "(this does not apply to warnings shared "
                             "with dtc.)")

    return parser.parse_args()

def write_top_comment(edt):
    # Writes an overview comment with misc. info at the top of the header and
    # configuration file

    s = f"""\
Generated by gen_defines.py

DTS input file:
  {edt.dts_path}

Directories with bindings:
  {", ".join(map(relativize, edt.bindings_dirs))}

Node dependency ordering (ordinal and path):
"""

    for scc in edt.scc_order:
        if len(scc) > 1:
            err("cycle in devicetree involving "
                + ", ".join(node.path for node in scc))
        s += f"  {scc[0].dep_ordinal:<3} {scc[0].path}\n"

    s += """
Definitions derived from these nodes in dependency order are next,
followed by /chosen nodes.
"""

    out_comment(s, blank_before=False)

def write_node_comment(node):
    # Writes a comment describing 'node' to the header and configuration file

    s = f"""\
Devicetree node: {node.path}

Node identifier: DT_{node.z_path_id}
"""

    if node.matching_compat:
        if node.binding_path:
            s += f"""
Binding (compatible = {node.matching_compat}):
  {relativize(node.binding_path)}
"""
        else:
            s += f"""
Binding (compatible = {node.matching_compat}):
  No yaml (bindings inferred from properties)
"""

    if node.description:
        # We used to put descriptions in the generated file, but
        # devicetree bindings now have pages in the HTML
        # documentation. Let users who are accustomed to digging
        # around in the generated file know where to find the
        # descriptions now.
        #
        # Keeping them here would mean that the descriptions
        # themselves couldn't contain C multi-line comments, which is
        # inconvenient when we want to do things like quote snippets
        # of .dtsi files within the descriptions, or otherwise
        # include the string "*/".
        s += ("\n(Descriptions have moved to the Devicetree Bindings Index\n"
              "in the documentation.)\n")

    out_comment(s)

def relativize(path):
    # If 'path' is within $ZEPHYR_BASE, returns it relative to $ZEPHYR_BASE,
    # with a "$ZEPHYR_BASE/..." hint at the start of the string. Otherwise,
    # returns 'path' unchanged.

    zbase = os.getenv("ZEPHYR_BASE")
    if zbase is None:
        return path

    try:
        return str("$ZEPHYR_BASE" / pathlib.Path(path).relative_to(zbase))
    except ValueError:
        # Not within ZEPHYR_BASE
        return path

def write_idents_and_existence(node):
    # Writes macros related to the node's aliases, labels, etc.,
    # as well as existence flags.

    # Aliases
    idents = [f"N_ALIAS_{str2ident(alias)}" for alias in node.aliases]
    # Instances
    for compat in node.compats:
        instance_no = node.edt.compat2nodes[compat].index(node)
        idents.append(f"N_INST_{instance_no}_{str2ident(compat)}")
    # Node labels
    idents.extend(f"N_NODELABEL_{str2ident(label)}" for label in node.labels)

    out_comment("Existence and alternate IDs:")
    out_dt_define(node.z_path_id + "_EXISTS", 1)

    # Only determine maxlen if we have any idents
    if idents:
        maxlen = max(len("DT_" + ident) for ident in idents)
    for ident in idents:
        out_dt_define(ident, "DT_" + node.z_path_id, width=maxlen)

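# Illustration for write_idents_and_existence(): for a hypothetical node
# /soc/i2c@40003000 with compatible "vnd,i2c", node label "i2c0", and alias
# "i2c-0", and assuming it is instance 0 of its compatible, the generated
# macros would look roughly like:
#
#   #define DT_N_S_soc_S_i2c_40003000_EXISTS 1
#   #define DT_N_ALIAS_i2c_0      DT_N_S_soc_S_i2c_40003000
#   #define DT_N_INST_0_vnd_i2c   DT_N_S_soc_S_i2c_40003000
#   #define DT_N_NODELABEL_i2c0   DT_N_S_soc_S_i2c_40003000
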
def write_bus(node):
    # Macros about the node's bus controller, if there is one

    bus = node.bus_node
    if not bus:
        return

    if not bus.label:
        err(f"missing 'label' property on bus node {bus!r}")

    out_comment(f"Bus info (controller: '{bus.path}', type: '{node.on_bus}')")
    out_dt_define(f"{node.z_path_id}_BUS_{str2ident(node.on_bus)}", 1)
    out_dt_define(f"{node.z_path_id}_BUS", f"DT_{bus.z_path_id}")

def write_special_props(node):
    # Writes required macros for special case properties, when the
    # data cannot otherwise be obtained from write_vanilla_props()
    # results

    # Macros that are special to the devicetree specification
    out_comment("Macros for properties that are special in the specification:")
    write_regs(node)
    write_interrupts(node)
    write_compatibles(node)
    write_status(node)

    # Macros that are special to bindings inherited from Linux, which
    # we can't capture with the current bindings language.
    write_pinctrls(node)
    write_fixed_partitions(node)

def write_regs(node):
    # reg property: edtlib knows the right #address-cells and
    # #size-cells, and can therefore pack the register base addresses
    # and sizes correctly

    idx_vals = []
    name_vals = []
    path_id = node.z_path_id

    if node.regs is not None:
        idx_vals.append((f"{path_id}_REG_NUM", len(node.regs)))

    for i, reg in enumerate(node.regs):
        idx_vals.append((f"{path_id}_REG_IDX_{i}_EXISTS", 1))
        if reg.addr is not None:
            idx_macro = f"{path_id}_REG_IDX_{i}_VAL_ADDRESS"
            idx_vals.append((idx_macro,
                             f"{reg.addr} /* {hex(reg.addr)} */"))
            if reg.name:
                name_macro = f"{path_id}_REG_NAME_{reg.name}_VAL_ADDRESS"
                name_vals.append((name_macro, f"DT_{idx_macro}"))

        if reg.size is not None:
            idx_macro = f"{path_id}_REG_IDX_{i}_VAL_SIZE"
            idx_vals.append((idx_macro,
                             f"{reg.size} /* {hex(reg.size)} */"))
            if reg.name:
                name_macro = f"{path_id}_REG_NAME_{reg.name}_VAL_SIZE"
                name_vals.append((name_macro, f"DT_{idx_macro}"))

    for macro, val in idx_vals:
        out_dt_define(macro, val)
    for macro, val in name_vals:
        out_dt_define(macro, val)

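# Illustration for write_regs(): for a hypothetical node /soc/uart@40002000
# with 'reg = <0x40002000 0x1000>;', the emitted macros would include:
#
#   #define DT_N_S_soc_S_uart_40002000_REG_NUM 1
#   #define DT_N_S_soc_S_uart_40002000_REG_IDX_0_EXISTS 1
#   #define DT_N_S_soc_S_uart_40002000_REG_IDX_0_VAL_ADDRESS 1073750016 /* 0x40002000 */
#   #define DT_N_S_soc_S_uart_40002000_REG_IDX_0_VAL_SIZE 4096 /* 0x1000 */
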
def write_interrupts(node):
    # interrupts property: we have some hard-coded logic for interrupt
    # mapping here.
    #
    # TODO: can we push map_arm_gic_irq_type() and
    # encode_zephyr_multi_level_irq() out of Python and into C with
    # macro magic in devicetree.h?

    def map_arm_gic_irq_type(irq, irq_num):
        # Maps ARM GIC IRQ (type)+(index) combo to linear IRQ number
        if "type" not in irq.data:
            err(f"Expected binding for {irq.controller!r} to have 'type' in "
                "interrupt-cells")

        irq_type = irq.data["type"]
        if irq_type == 0:  # GIC_SPI
            return irq_num + 32
        if irq_type == 1:  # GIC_PPI
            return irq_num + 16
        err(f"Invalid interrupt type specified for {irq!r}")

    def encode_zephyr_multi_level_irq(irq, irq_num):
        # See doc/reference/kernel/other/interrupts.rst for details
        # on how this encoding works

        irq_ctrl = irq.controller
        # Look for interrupt controller parent until we have none
        while irq_ctrl.interrupts:
            irq_num = (irq_num + 1) << 8
            if "irq" not in irq_ctrl.interrupts[0].data:
                err(f"Expected binding for {irq_ctrl!r} to have 'irq' in "
                    "interrupt-cells")
            irq_num |= irq_ctrl.interrupts[0].data["irq"]
            irq_ctrl = irq_ctrl.interrupts[0].controller
        return irq_num

    idx_vals = []
    name_vals = []
    path_id = node.z_path_id

    if node.interrupts is not None:
        idx_vals.append((f"{path_id}_IRQ_NUM", len(node.interrupts)))

    for i, irq in enumerate(node.interrupts):
        for cell_name, cell_value in irq.data.items():
            name = str2ident(cell_name)

            if cell_name == "irq":
                if "arm,gic" in irq.controller.compats:
                    cell_value = map_arm_gic_irq_type(irq, cell_value)
                cell_value = encode_zephyr_multi_level_irq(irq, cell_value)

            idx_vals.append((f"{path_id}_IRQ_IDX_{i}_EXISTS", 1))
            idx_macro = f"{path_id}_IRQ_IDX_{i}_VAL_{name}"
            idx_vals.append((idx_macro, cell_value))
            idx_vals.append((idx_macro + "_EXISTS", 1))
            if irq.name:
                name_macro = \
                    f"{path_id}_IRQ_NAME_{str2ident(irq.name)}_VAL_{name}"
                name_vals.append((name_macro, f"DT_{idx_macro}"))
                name_vals.append((name_macro + "_EXISTS", 1))

    for macro, val in idx_vals:
        out_dt_define(macro, val)
    for macro, val in name_vals:
        out_dt_define(macro, val)

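# Illustration for encode_zephyr_multi_level_irq(): a hypothetical
# second-level interrupt with local number 3, whose controller is itself
# wired to line 5 of the root-level controller, would be encoded as
# ((3 + 1) << 8) | 5 = 0x405.
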
def write_compatibles(node):
    # Writes a macro for each of the node's compatibles. We don't care
    # about whether edtlib / Zephyr's binding language recognizes
    # them. The compatibles the node provides are what is important.

    for compat in node.compats:
        out_dt_define(
            f"{node.z_path_id}_COMPAT_MATCHES_{str2ident(compat)}", 1)

def write_child_functions(node):
    # Writes macros that are helpers that will call a macro/function
    # for each child node.

    out_dt_define(f"{node.z_path_id}_FOREACH_CHILD(fn)",
                  " ".join(f"fn(DT_{child.z_path_id})" for child in
                           node.children.values()))

    out_dt_define(f"{node.z_path_id}_FOREACH_CHILD_VARGS(fn, ...)",
                  " ".join(f"fn(DT_{child.z_path_id}, __VA_ARGS__)" for child in
                           node.children.values()))

def write_child_functions_status_okay(node):
    # Writes macros that are helpers that will call a macro/function
    # for each child node with status "okay".

    functions = ''
    functions_args = ''

    for child in node.children.values():
        if child.status == "okay":
            functions = functions + f"fn(DT_{child.z_path_id}) "
            functions_args = functions_args + f"fn(DT_{child.z_path_id}, " \
                                              "__VA_ARGS__) "

    out_dt_define(f"{node.z_path_id}_FOREACH_CHILD_STATUS_OKAY(fn)", functions)
    out_dt_define(f"{node.z_path_id}_FOREACH_CHILD_STATUS_OKAY_VARGS(fn, ...)",
                  functions_args)

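# Illustration for the FOREACH_CHILD helpers: for a hypothetical /soc node
# with children uart@40002000 (status "okay") and spi@40004000 (disabled),
# the generated macros would look roughly like:
#
#   #define DT_N_S_soc_FOREACH_CHILD(fn) fn(DT_N_S_soc_S_uart_40002000) fn(DT_N_S_soc_S_spi_40004000)
#   #define DT_N_S_soc_FOREACH_CHILD_STATUS_OKAY(fn) fn(DT_N_S_soc_S_uart_40002000)
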
def write_status(node):
    out_dt_define(f"{node.z_path_id}_STATUS_{str2ident(node.status)}", 1)

def write_pinctrls(node):
    # Write special macros for pinctrl-<index> and pinctrl-names properties.

    out_comment("Pin control (pinctrl-<i>, pinctrl-names) properties:")

    out_dt_define(f"{node.z_path_id}_PINCTRL_NUM", len(node.pinctrls))

    if not node.pinctrls:
        return

    for pc_idx, pinctrl in enumerate(node.pinctrls):
        out_dt_define(f"{node.z_path_id}_PINCTRL_IDX_{pc_idx}_EXISTS", 1)

        if not pinctrl.name:
            continue

        name = pinctrl.name_as_token

        # Below we rely on the fact that edtlib ensures the
        # pinctrl-<pc_idx> properties are contiguous, start from 0,
        # and contain only phandles.
        out_dt_define(f"{node.z_path_id}_PINCTRL_IDX_{pc_idx}_TOKEN", name)
        out_dt_define(f"{node.z_path_id}_PINCTRL_IDX_{pc_idx}_UPPER_TOKEN", name.upper())
        out_dt_define(f"{node.z_path_id}_PINCTRL_NAME_{name}_EXISTS", 1)
        out_dt_define(f"{node.z_path_id}_PINCTRL_NAME_{name}_IDX", pc_idx)
        for idx, ph in enumerate(pinctrl.conf_nodes):
            out_dt_define(f"{node.z_path_id}_PINCTRL_NAME_{name}_IDX_{idx}_PH",
                          f"DT_{ph.z_path_id}")

def write_fixed_partitions(node):
    # Macros for child nodes of each fixed-partitions node.

    if not (node.parent and "fixed-partitions" in node.parent.compats):
        return

    global flash_area_num

    out_comment("fixed-partitions identifier:")
    out_dt_define(f"{node.z_path_id}_PARTITION_ID", flash_area_num)
    flash_area_num += 1

def write_vanilla_props(node):
    # Writes macros for any and all properties defined in the
    # "properties" section of the binding for the node.
    #
    # This does generate macros for special properties as well, like
    # regs, etc. Just let that be rather than bothering to add
    # never-ending amounts of special case code here to skip special
    # properties. This function's macros can't conflict with
    # write_special_props() macros, because they're in different
    # namespaces. Special cases aren't special enough to break the rules.

    macro2val = {}
    for prop_name, prop in node.props.items():
        prop_id = str2ident(prop_name)
        macro = f"{node.z_path_id}_P_{prop_id}"
        val = prop2value(prop)
        if val is not None:
            # DT_N_<node-id>_P_<prop-id>
            macro2val[macro] = val

        if prop.spec.type == 'string':
            macro2val[macro + "_STRING_TOKEN"] = prop.val_as_token
            macro2val[macro + "_STRING_UPPER_TOKEN"] = prop.val_as_token.upper()

        if prop.enum_index is not None:
            # DT_N_<node-id>_P_<prop-id>_ENUM_IDX
            macro2val[macro + "_ENUM_IDX"] = prop.enum_index
            spec = prop.spec
            if spec.enum_tokenizable:
                as_token = prop.val_as_token

                # DT_N_<node-id>_P_<prop-id>_ENUM_TOKEN
                macro2val[macro + "_ENUM_TOKEN"] = as_token

                if spec.enum_upper_tokenizable:
                    # DT_N_<node-id>_P_<prop-id>_ENUM_UPPER_TOKEN
                    macro2val[macro + "_ENUM_UPPER_TOKEN"] = as_token.upper()

        if "phandle" in prop.type:
            macro2val.update(phandle_macros(prop, macro))
        elif "array" in prop.type:
            # DT_N_<node-id>_P_<prop-id>_IDX_<i>
            # DT_N_<node-id>_P_<prop-id>_IDX_<i>_EXISTS
            for i, subval in enumerate(prop.val):
                if isinstance(subval, str):
                    macro2val[macro + f"_IDX_{i}"] = quote_str(subval)
                else:
                    macro2val[macro + f"_IDX_{i}"] = subval
                macro2val[macro + f"_IDX_{i}_EXISTS"] = 1

        if prop.type in FOREACH_PROP_ELEM_TYPES:
            # DT_N_<node-id>_P_<prop-id>_FOREACH_PROP_ELEM
            macro2val[f"{macro}_FOREACH_PROP_ELEM(fn)"] = \
                ' \\\n\t'.join(f'fn(DT_{node.z_path_id}, {prop_id}, {i})'
                               for i in range(len(prop.val)))

            macro2val[f"{macro}_FOREACH_PROP_ELEM_VARGS(fn, ...)"] = \
                ' \\\n\t'.join(f'fn(DT_{node.z_path_id}, {prop_id}, {i},'
                               ' __VA_ARGS__)'
                               for i in range(len(prop.val)))

        plen = prop_len(prop)
        if plen is not None:
            # DT_N_<node-id>_P_<prop-id>_LEN
            macro2val[macro + "_LEN"] = plen

        macro2val[f"{macro}_EXISTS"] = 1

    if macro2val:
        out_comment("Generic property macros:")
        for macro, val in macro2val.items():
            out_dt_define(macro, val)
    else:
        out_comment("(No generic property macros)")

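# Illustration for write_vanilla_props(): a hypothetical
# 'current-speed = <115200>;' property (declared in the node's binding)
# on /soc/uart@40002000 would produce:
#
#   #define DT_N_S_soc_S_uart_40002000_P_current_speed 115200
#   #define DT_N_S_soc_S_uart_40002000_P_current_speed_EXISTS 1
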
def write_dep_info(node):
    # Write dependency-related information about the node.

    def fmt_dep_list(dep_list):
        if dep_list:
            # Sort the list by dependency ordinal for predictability.
            sorted_list = sorted(dep_list, key=lambda node: node.dep_ordinal)
            return "\\\n\t" + \
                " \\\n\t".join(f"{n.dep_ordinal}, /* {n.path} */"
                               for n in sorted_list)
        else:
            return "/* nothing */"

    out_comment("Node's dependency ordinal:")
    out_dt_define(f"{node.z_path_id}_ORD", node.dep_ordinal)

    out_comment("Ordinals for what this node depends on directly:")
    out_dt_define(f"{node.z_path_id}_REQUIRES_ORDS",
                  fmt_dep_list(node.depends_on))

    out_comment("Ordinals for what depends directly on this node:")
    out_dt_define(f"{node.z_path_id}_SUPPORTS_ORDS",
                  fmt_dep_list(node.required_by))

def prop2value(prop):
    # Gets the macro value for property 'prop', if there is
    # a single well-defined C rvalue that it can be represented as.
    # Returns None if there isn't one.

    if prop.type == "string":
        return quote_str(prop.val)

    if prop.type == "int":
        return prop.val

    if prop.type == "boolean":
        return 1 if prop.val else 0

    if prop.type in ["array", "uint8-array"]:
        return list2init(f"{val} /* {hex(val)} */" for val in prop.val)

    if prop.type == "string-array":
        return list2init(quote_str(val) for val in prop.val)

    # phandle, phandles, phandle-array, path, compound: nothing
    return None

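# For example, prop2value() maps an 'int' property with value 8 to the C
# rvalue 8, a present 'boolean' to 1, and an 'array' such as <1 2> to
# "{1 /* 0x1 */, 2 /* 0x2 */}" via list2init(); phandle-like and 'path'
# properties map to None and are handled elsewhere.
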
def prop_len(prop):
    # Returns the property's length if and only if we should generate
    # a _LEN macro for the property. Otherwise, returns None.
    #
    # This deliberately excludes reg and interrupts.
    # While they have array type, their lengths as arrays are
    # basically nonsense semantically due to #address-cells and
    # #size-cells for "reg" and #interrupt-cells for "interrupts".
    #
    # We have special purpose macros for the number of register blocks
    # / interrupt specifiers. Excluding them from this list means
    # DT_PROP_LEN(node_id, ...) fails fast at the devicetree.h layer
    # with a build error. This forces users to switch to the right
    # macros.

    if prop.type == "phandle":
        return 1

    if (prop.type in ["array", "uint8-array", "string-array",
                      "phandles", "phandle-array"] and
            prop.name not in ["reg", "interrupts"]):
        return len(prop.val)

    return None

def phandle_macros(prop, macro):
    # Returns a dict of macros for phandle or phandles property 'prop'.
    #
    # The 'macro' argument is the N_<node-id>_P_<prop-id> bit.
    #
    # These are currently special because we can't serialize their
    # values without using label properties, which we're trying to get
    # away from needing in Zephyr. (Label properties are great for
    # humans, but have drawbacks for code size and boot time.)
    #
    # The names look a bit weird to make it easier for devicetree.h
    # to use the same macros for phandle, phandles, and phandle-array.

    ret = {}

    if prop.type == "phandle":
        # A phandle is treated as a phandles with fixed length 1.
        ret[f"{macro}"] = f"DT_{prop.val.z_path_id}"
        ret[f"{macro}_IDX_0"] = f"DT_{prop.val.z_path_id}"
        ret[f"{macro}_IDX_0_PH"] = f"DT_{prop.val.z_path_id}"
        ret[f"{macro}_IDX_0_EXISTS"] = 1
    elif prop.type == "phandles":
        for i, node in enumerate(prop.val):
            ret[f"{macro}_IDX_{i}"] = f"DT_{node.z_path_id}"
            ret[f"{macro}_IDX_{i}_PH"] = f"DT_{node.z_path_id}"
            ret[f"{macro}_IDX_{i}_EXISTS"] = 1
    elif prop.type == "phandle-array":
        for i, entry in enumerate(prop.val):
            if entry is None:
                # Unspecified element. The phandle-array at this index
                # does not point at a ControllerAndData value, but
                # subsequent indices in the array may.
                ret[f"{macro}_IDX_{i}_EXISTS"] = 0
                continue

            ret.update(controller_and_data_macros(entry, i, macro))

    return ret

def controller_and_data_macros(entry, i, macro):
    # Helper procedure used by phandle_macros().
    #
    # Its purpose is to write the "controller" (i.e. label property of
    # the phandle's node) and associated data macros for a
    # ControllerAndData.

    ret = {}
    data = entry.data

    # DT_N_<node-id>_P_<prop-id>_IDX_<i>_EXISTS
    ret[f"{macro}_IDX_{i}_EXISTS"] = 1
    # DT_N_<node-id>_P_<prop-id>_IDX_<i>_PH
    ret[f"{macro}_IDX_{i}_PH"] = f"DT_{entry.controller.z_path_id}"
    # DT_N_<node-id>_P_<prop-id>_IDX_<i>_VAL_<VAL>
    for cell, val in data.items():
        ret[f"{macro}_IDX_{i}_VAL_{str2ident(cell)}"] = val
        ret[f"{macro}_IDX_{i}_VAL_{str2ident(cell)}_EXISTS"] = 1

    if not entry.name:
        return ret

    name = str2ident(entry.name)
    # DT_N_<node-id>_P_<prop-id>_IDX_<i>_EXISTS
    ret[f"{macro}_IDX_{i}_EXISTS"] = 1
    # DT_N_<node-id>_P_<prop-id>_IDX_<i>_NAME
    ret[f"{macro}_IDX_{i}_NAME"] = quote_str(entry.name)
    # DT_N_<node-id>_P_<prop-id>_NAME_<NAME>_PH
    ret[f"{macro}_NAME_{name}_PH"] = f"DT_{entry.controller.z_path_id}"
    # DT_N_<node-id>_P_<prop-id>_NAME_<NAME>_EXISTS
    ret[f"{macro}_NAME_{name}_EXISTS"] = 1
    # DT_N_<node-id>_P_<prop-id>_NAME_<NAME>_VAL_<VAL>
    for cell, val in data.items():
        cell_ident = str2ident(cell)
        ret[f"{macro}_NAME_{name}_VAL_{cell_ident}"] = \
            f"DT_{macro}_IDX_{i}_VAL_{cell_ident}"
        ret[f"{macro}_NAME_{name}_VAL_{cell_ident}_EXISTS"] = 1

    return ret

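# Illustration for phandle_macros() / controller_and_data_macros(): a
# hypothetical 'foo-gpios = <&gpio0 17 0>;' phandle-array property (with
# 'macro' set to "N_S_leds_S_led_0_P_foo_gpios", gpio0 at /soc/gpio@50000000,
# and "pin" and "flags" specifier cells) would yield entries such as:
#
#   #define DT_N_S_leds_S_led_0_P_foo_gpios_IDX_0_EXISTS 1
#   #define DT_N_S_leds_S_led_0_P_foo_gpios_IDX_0_PH DT_N_S_soc_S_gpio_50000000
#   #define DT_N_S_leds_S_led_0_P_foo_gpios_IDX_0_VAL_pin 17
#   #define DT_N_S_leds_S_led_0_P_foo_gpios_IDX_0_VAL_flags 0
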
def write_chosen(edt):
    # Tree-wide information such as chosen nodes is printed here.

    out_comment("Chosen nodes\n")
    chosen = {}
    for name, node in edt.chosen_nodes.items():
        chosen[f"DT_CHOSEN_{str2ident(name)}"] = f"DT_{node.z_path_id}"
        chosen[f"DT_CHOSEN_{str2ident(name)}_EXISTS"] = 1
    max_len = max(map(len, chosen), default=0)
    for macro, value in chosen.items():
        out_define(macro, value, width=max_len)

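# Illustration for write_chosen(): a hypothetical 'zephyr,console = &uart0;'
# chosen property pointing at /soc/uart@40002000 would produce:
#
#   #define DT_CHOSEN_zephyr_console        DT_N_S_soc_S_uart_40002000
#   #define DT_CHOSEN_zephyr_console_EXISTS 1
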
def write_global_compat_info(edt):
    # Tree-wide information related to each compatible, such as number
    # of instances with status "okay", is printed here.

    n_okay_macros = {}
    for_each_macros = {}
    compat2buses = defaultdict(list)  # just for "okay" nodes
    for compat, okay_nodes in edt.compat2okay.items():
        for node in okay_nodes:
            bus = node.on_bus
            if bus is not None and bus not in compat2buses[compat]:
                compat2buses[compat].append(bus)

        ident = str2ident(compat)
        n_okay_macros[f"DT_N_INST_{ident}_NUM_OKAY"] = len(okay_nodes)

        # Helpers for non-INST for-each macros that take node
        # identifiers as arguments.
        for_each_macros[f"DT_FOREACH_OKAY_{ident}(fn)"] = \
            " ".join(f"fn(DT_{node.z_path_id})"
                     for node in okay_nodes)
        for_each_macros[f"DT_FOREACH_OKAY_VARGS_{ident}(fn, ...)"] = \
            " ".join(f"fn(DT_{node.z_path_id}, __VA_ARGS__)"
                     for node in okay_nodes)

        # Helpers for INST versions of for-each macros, which take
        # instance numbers. We emit separate helpers for these because
        # avoiding an intermediate node_id --> instance number
        # conversion in the preprocessor helps to keep the macro
        # expansions simpler. That hopefully eases debugging.
        for_each_macros[f"DT_FOREACH_OKAY_INST_{ident}(fn)"] = \
            " ".join(f"fn({edt.compat2nodes[compat].index(node)})"
                     for node in okay_nodes)
        for_each_macros[f"DT_FOREACH_OKAY_INST_VARGS_{ident}(fn, ...)"] = \
            " ".join(f"fn({edt.compat2nodes[compat].index(node)}, __VA_ARGS__)"
                     for node in okay_nodes)

    for compat, nodes in edt.compat2nodes.items():
        for node in nodes:
            if compat == "fixed-partitions":
                for child in node.children.values():
                    if "label" in child.props:
                        label = child.props["label"].val
                        macro = f"COMPAT_{str2ident(compat)}_LABEL_{str2ident(label)}"
                        val = f"DT_{child.z_path_id}"

                        out_dt_define(macro, val)
                        out_dt_define(macro + "_EXISTS", 1)

    out_comment('Macros for compatibles with status "okay" nodes\n')
    for compat, okay_nodes in edt.compat2okay.items():
        if okay_nodes:
            out_define(f"DT_COMPAT_HAS_OKAY_{str2ident(compat)}", 1)

    out_comment('Macros for status "okay" instances of each compatible\n')
    for macro, value in n_okay_macros.items():
        out_define(macro, value)
    for macro, value in for_each_macros.items():
        out_define(macro, value)

    out_comment('Bus information for status "okay" nodes of each compatible\n')
    for compat, buses in compat2buses.items():
        for bus in buses:
            out_define(
                f"DT_COMPAT_{str2ident(compat)}_BUS_{str2ident(bus)}", 1)

def str2ident(s):
    # Converts 's' to a form suitable for (part of) an identifier

    return re.sub('[-,.@/+]', '_', s.lower())

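# For example, str2ident("vnd,foo-device") returns "vnd_foo_device" and
# str2ident("uart@40002000") returns "uart_40002000".
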
def list2init(l):
    # Converts 'l', a Python list (or iterable), to a C array initializer

    return "{" + ", ".join(l) + "}"

def out_dt_define(macro, val, width=None, deprecation_msg=None):
    # Writes "#define DT_<macro> <val>" to the header file
    #
    # The macro will be left-justified to 'width' characters if that
    # is specified, and the value will follow immediately after in
    # that case. Otherwise, this function decides how to add
    # whitespace between 'macro' and 'val'.
    #
    # If a 'deprecation_msg' string is passed, the generated identifiers will
    # generate a warning if used, via __WARN(<deprecation_msg>).
    #
    # Returns the full generated macro for 'macro', with leading "DT_".

    ret = "DT_" + macro
    out_define(ret, val, width=width, deprecation_msg=deprecation_msg)
    return ret

def out_define(macro, val, width=None, deprecation_msg=None):
    # Helper for out_dt_define(). Outputs "#define <macro> <val>",
    # adds a deprecation message if given, and allocates whitespace
    # unless told not to.

    warn = fr' __WARN("{deprecation_msg}")' if deprecation_msg else ""

    if width:
        s = f"#define {macro.ljust(width)}{warn} {val}"
    else:
        s = f"#define {macro}{warn} {val}"

    print(s, file=header_file)

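# For example, out_define("DT_N_S_soc_EXISTS", 1) emits the line
# '#define DT_N_S_soc_EXISTS 1' to the generated header.
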
def out_comment(s, blank_before=True):
    # Writes 's' as a comment to the header and configuration file. 's' is
    # allowed to have multiple lines. blank_before=True adds a blank line
    # before the comment.

    if blank_before:
        print(file=header_file)

    if "\n" in s:
        # Format multi-line comments like
        #
        #   /*
        #    * first line
        #    * second line
        #    *
        #    * empty line before this line
        #    */
        res = ["/*"]
        for line in s.splitlines():
            # Avoid an extra space after '*' for empty lines. They turn red in
            # Vim if space error checking is on, which is annoying.
            res.append(" *" if not line.strip() else " * " + line)
        res.append(" */")
        print("\n".join(res), file=header_file)
    else:
        # Format single-line comments like
        #
        # /* foo bar */
        print("/* " + s + " */", file=header_file)

def escape(s):
    # Backslash-escapes any double quotes and backslashes in 's'

    # \ must be escaped before " to avoid double escaping
    return s.replace("\\", "\\\\").replace('"', '\\"')

def quote_str(s):
    # Puts quotes around 's' and escapes any double quotes and
    # backslashes within it

    return f'"{escape(s)}"'

def write_pickled_edt(edt, out_file):
    # Writes the edt object in pickle format to out_file.

    with open(out_file, 'wb') as f:
        # Pickle protocol version 4 is the default as of Python 3.8
        # and was introduced in 3.4, so it is both available and
        # recommended on all versions of Python that Zephyr supports
        # (at time of writing, Python 3.6 was Zephyr's minimum
        # version, and 3.8 the most recent CPython release).
        #
        # Using a common protocol version here will hopefully avoid
        # reproducibility issues in different Python installations.
        pickle.dump(edt, f, protocol=4)

def err(s):
    raise Exception(s)

if __name__ == "__main__":
    main()