  1. /*
  2. * Copyright (c) 2018 Intel Corporation
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. /* Our SDK/toolchains integration seems to be inconsistent about
  7. * whether they expose alloca.h or not. On gcc it's a moot point as
  8. * it's always builtin.
  9. */
  10. #ifdef __GNUC__
  11. #ifndef alloca
  12. #define alloca __builtin_alloca
  13. #endif
  14. #else
  15. #include <alloca.h>
  16. #endif
  17. /**
  18. * @file
  19. * @brief Red/Black balanced tree data structure
  20. *
  21. * This implements an intrusive balanced tree that guarantees
  22. * O(log2(N)) runtime for all operations and amortized O(1) behavior
  23. * for creation and destruction of whole trees. The algorithms and
  24. * naming are conventional per existing academic and didactic
  25. * implementations, c.f.:
  26. *
  27. * https://en.wikipedia.org/wiki/Red%E2%80%93black_tree
  28. *
  29. * The implementation is size-optimized to prioritize runtime memory
  30. * usage. The data structure is intrusive, which is to say the struct
  31. * rbnode handle is intended to be placed in a separate struct the
  32. * same way other such structures (e.g. Zephyr's dlist list) and
  33. * requires no data pointer to be stored in the node. The color bit
  34. * is unioned with a pointer (fairly common for such libraries). Most
  35. * notably, there is no "parent" pointer stored in the node, the upper
  36. * structure of the tree being generated dynamically via a stack as
  37. * the tree is recursed. So the overall memory overhead of a node is
  38. * just two pointers, identical with a doubly-linked list.
  39. */
  40. #ifndef ZEPHYR_INCLUDE_SYS_RB_H_
  41. #define ZEPHYR_INCLUDE_SYS_RB_H_
  42. #include <stdbool.h>
  43. #include <stdint.h>
/**
 * @brief Balanced red/black tree node structure
 *
 * Intrusive: embed this struct in your own data structure; no data
 * pointer is stored in the node. Per the file header, the color bit
 * is unioned with a pointer and no parent pointer is kept, so the
 * per-node overhead is exactly two pointers.
 */
struct rbnode {
	/* Child pointers: index 0 = left, index 1 = right (see the
	 * "side" arguments elsewhere in this API). One pointer also
	 * carries the node's red/black color bit.
	 */
	struct rbnode *children[2];
};
/* Theoretical maximum depth of tree based on pointer size. If memory
 * is filled with 2-pointer nodes, and the tree can be twice as deep
 * as a packed binary tree, plus root... Works out to 59 entries for
 * 32 bit pointers and 121 at 64 bits.
 */

/* log2 of the pointer size in bytes: 2 for 32-bit, 3 for 64-bit */
#define Z_TBITS(t) ((sizeof(t)) < 8 ? 2 : 3)
/* Number of bits in a pointer */
#define Z_PBITS(t) (8 * sizeof(t))
/* Max node count is 2^(PBITS - TBITS - 1) (address space divided by
 * the 2-pointer node size); worst-case RB depth is 2*log2(N) + 1.
 */
#define Z_MAX_RBTREE_DEPTH (2 * (Z_PBITS(int *) - Z_TBITS(int *) - 1) + 1)
  55. /**
  56. * @defgroup rbtree_apis Balanced Red/Black Tree
  57. * @ingroup datastructure_apis
  58. * @{
  59. */
/**
 * @typedef rb_lessthan_t
 * @brief Red/black tree comparison predicate
 *
 * Compares the two nodes and returns true if node A is strictly less
 * than B according to the tree's sorting criteria, false otherwise.
 *
 * Note that during insert, the new node being inserted will always be
 * "A", where "B" is the existing node within the tree against which
 * it is being compared.  This trait can be used (with care!) to
 * implement "most/least recently added" semantics between nodes which
 * would otherwise compare as equal.
 */
typedef bool (*rb_lessthan_t)(struct rbnode *a, struct rbnode *b);
/**
 * @brief Balanced red/black tree structure
 */
struct rbtree {
	/** Root node of the tree */
	struct rbnode *root;
	/** Comparison predicate applied to nodes in the tree */
	rb_lessthan_t lessthan_fn;
	/* Current worst-case depth; used to size the iteration stacks
	 * allocated by _RB_FOREACH_INIT in the non-MISRA build.
	 */
	int max_depth;
#ifdef CONFIG_MISRA_SANE
	/* MISRA mode: statically sized iteration stacks live in the
	 * tree itself so that RB_FOR_EACH needs no alloca().
	 */
	struct rbnode *iter_stack[Z_MAX_RBTREE_DEPTH];
	unsigned char iter_left[Z_MAX_RBTREE_DEPTH];
#endif
};
/**
 * @typedef rb_visit_t
 * @brief Callback invoked for each node during a tree walk
 */
typedef void (*rb_visit_t)(struct rbnode *node, void *cookie);

/* Internal helpers implemented by the library (the z_ prefix marks
 * private API; not intended for application use).
 */
struct rbnode *z_rb_child(struct rbnode *node, uint8_t side);
int z_rb_is_black(struct rbnode *node);
#ifndef CONFIG_MISRA_SANE
void z_rb_walk(struct rbnode *node, rb_visit_t visit_fn, void *cookie);
#endif
struct rbnode *z_rb_get_minmax(struct rbtree *tree, uint8_t side);
/**
 * @brief Insert node into tree
 *
 * Placement is determined by the tree's lessthan_fn predicate; per
 * the file header, all operations run in O(log2(N)).
 *
 * @param tree Tree to insert into
 * @param node Node to insert (storage owned by the caller)
 */
void rb_insert(struct rbtree *tree, struct rbnode *node);
/**
 * @brief Remove node from tree
 *
 * @param tree Tree to remove from
 * @param node Node to remove (must currently be a member of @p tree)
 */
void rb_remove(struct rbtree *tree, struct rbnode *node);
  98. /**
  99. * @brief Returns the lowest-sorted member of the tree
  100. */
  101. static inline struct rbnode *rb_get_min(struct rbtree *tree)
  102. {
  103. return z_rb_get_minmax(tree, 0U);
  104. }
  105. /**
  106. * @brief Returns the highest-sorted member of the tree
  107. */
  108. static inline struct rbnode *rb_get_max(struct rbtree *tree)
  109. {
  110. return z_rb_get_minmax(tree, 1U);
  111. }
/**
 * @brief Returns true if the given node is part of the tree
 *
 * Note that this does not internally dereference the node pointer
 * (though the tree's lessthan callback might!), it just tests it for
 * equality with items in the tree.  So it's feasible to use this to
 * implement a "set" construct by simply testing the pointer value
 * itself.
 *
 * @param tree Tree to search
 * @param node Node pointer to test for membership
 * @return true if @p node is currently linked into @p tree
 */
bool rb_contains(struct rbtree *tree, struct rbnode *node);
  122. #ifndef CONFIG_MISRA_SANE
  123. /**
  124. * @brief Walk/enumerate a rbtree
  125. *
  126. * Very simple recursive enumeration. Low code size, but requiring a
  127. * separate function can be clumsy for the user and there is no way to
  128. * break out of the loop early. See RB_FOR_EACH for an iterative
  129. * implementation.
  130. */
  131. static inline void rb_walk(struct rbtree *tree, rb_visit_t visit_fn,
  132. void *cookie)
  133. {
  134. z_rb_walk(tree->root, visit_fn, cookie);
  135. }
  136. #endif
/* Iteration state for the non-recursive RB_FOR_EACH loops: an
 * explicit stack of ancestor nodes (nodes store no parent pointer)
 * plus per-slot traversal bookkeeping. top starts at -1 (empty
 * stack) per _RB_FOREACH_INIT.
 */
struct _rb_foreach {
	/* Stack of nodes along the current descent path */
	struct rbnode **stack;
	/* Per-slot traversal flags -- presumably "left child already
	 * taken"; exact semantics live in z_rb_foreach_next()
	 */
	uint8_t *is_left;
	/* Index of the top of stack, -1 when empty */
	int32_t top;
};
#ifdef CONFIG_MISRA_SANE
/* MISRA mode: use the worst-case-sized stacks embedded in the rbtree
 * itself, avoiding alloca().  Since the storage lives in the tree, a
 * given tree supports only one iteration at a time in this mode.
 */
#define _RB_FOREACH_INIT(tree, node) { \
	.stack = &(tree)->iter_stack[0], \
	.is_left = &(tree)->iter_left[0], \
	.top = -1 \
}
#else
/* Default mode: carve the iteration stacks out of the caller's stack
 * frame with alloca(), sized by the tree's current max_depth.
 */
#define _RB_FOREACH_INIT(tree, node) { \
	.stack = (struct rbnode **) \
		alloca((tree)->max_depth * sizeof(struct rbnode *)), \
	.is_left = (uint8_t *)alloca((tree)->max_depth * sizeof(uint8_t)),\
	.top = -1 \
}
#endif
/* Internal: advances the traversal tracked by @p f and returns the
 * next node, or NULL when the walk is complete (z_ prefix: private
 * API; used by the RB_FOR_EACH macros).
 */
struct rbnode *z_rb_foreach_next(struct rbtree *tree, struct _rb_foreach *f);
/**
 * @brief Walk a tree in-order without recursing
 *
 * While @ref rb_walk() is very simple, recursing on the C stack can
 * be clumsy for some purposes and on some architectures wastes
 * significant memory in stack frames.  This macro implements a
 * non-recursive "foreach" loop that can iterate directly on the tree,
 * at a moderate cost in code size.
 *
 * Note that the resulting loop is not safe against modifications to
 * the tree.  Changes to the tree structure during the loop will
 * produce incorrect results, as nodes may be skipped or duplicated.
 * Unlike linked lists, no _SAFE variant exists.
 *
 * Note also that the macro expands its arguments multiple times, so
 * they should not be expressions with side effects.
 *
 * @param tree A pointer to a struct rbtree to walk
 * @param node The symbol name of a local struct rbnode* variable to
 *             use as the iterator
 */
#define RB_FOR_EACH(tree, node) \
	for (struct _rb_foreach __f = _RB_FOREACH_INIT(tree, node); \
	     (node = z_rb_foreach_next(tree, &__f)); \
	     /**/)
/**
 * @brief Loop over rbtree with implicit container field logic
 *
 * As for RB_FOR_EACH(), but "node" can have an arbitrary type
 * containing a struct rbnode.
 *
 * NOTE(review): this expansion relies on CONTAINER_OF() and the GNU
 * statement-expression / __typeof__ extensions, none of which are
 * defined in this header -- presumably provided by an included
 * utility header; confirm against the including environment.
 *
 * @param tree A pointer to a struct rbtree to walk
 * @param node The symbol name of a local iterator
 * @param field The field name of a struct rbnode inside node
 */
#define RB_FOR_EACH_CONTAINER(tree, node, field) \
	for (struct _rb_foreach __f = _RB_FOREACH_INIT(tree, node); \
	     ({struct rbnode *n = z_rb_foreach_next(tree, &__f); \
	       node = n ? CONTAINER_OF(n, __typeof__(*(node)), \
				       field) : NULL; }) != NULL; \
	     /**/)
  198. /** @} */
  199. #endif /* ZEPHYR_INCLUDE_SYS_RB_H_ */