/*
 * Copyright (c) 2018 Intel Corporation
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/* Our SDK/toolchain integrations are inconsistent about whether they
 * expose alloca.h.  On gcc it's a moot point, as alloca is always a
 * builtin.
 */
#ifdef __GNUC__
#ifndef alloca
#define alloca __builtin_alloca
#endif
#else
#include <alloca.h>
#endif

/**
 * @file
 * @brief Red/Black balanced tree data structure
 *
 * This implements an intrusive balanced tree that guarantees
 * O(log2(N)) runtime for all operations and amortized O(1) behavior
 * for creation and destruction of whole trees.  The algorithms and
 * naming are conventional per existing academic and didactic
 * implementations, cf.:
 *
 * https://en.wikipedia.org/wiki/Red%E2%80%93black_tree
 *
 * The implementation is size-optimized to prioritize runtime memory
 * usage.  The data structure is intrusive: the struct rbnode handle
 * is intended to be placed in a containing struct, in the same way as
 * other such structures (e.g. Zephyr's dlist), so no data pointer
 * needs to be stored in the node.  The color bit is unioned with a
 * pointer (fairly common for such libraries).  Most notably, there is
 * no "parent" pointer stored in the node; the upper structure of the
 * tree is generated dynamically via a stack as the tree is recursed.
 * So the overall memory overhead of a node is just two pointers,
 * identical to a doubly-linked list.
 */

#ifndef ZEPHYR_INCLUDE_SYS_RB_H_
#define ZEPHYR_INCLUDE_SYS_RB_H_

#include <stdbool.h>
#include <stdint.h>

struct rbnode {
	struct rbnode *children[2];
};

/* Theoretical maximum depth of tree based on pointer size. If memory
 * is filled with 2-pointer nodes, and the tree can be twice as deep
 * as a packed binary tree, plus root...  Works out to 59 entries for
 * 32 bit pointers and 121 at 64 bits.
 */
#define Z_TBITS(t) ((sizeof(t)) < 8 ? 2 : 3)
#define Z_PBITS(t) (8 * sizeof(t))
#define Z_MAX_RBTREE_DEPTH (2 * (Z_PBITS(int *) - Z_TBITS(int *) - 1) + 1)


/**
 * @defgroup rbtree_apis Balanced Red/Black Tree
 * @ingroup datastructure_apis
 * @{
 */
/**
 * @typedef rb_lessthan_t
 * @brief Red/black tree comparison predicate
 *
 * Compares the two nodes and returns true if node A is strictly less
 * than B according to the tree's sorting criteria, false otherwise.
 *
 * Note that during insert, the new node being inserted will always be
 * "A", where "B" is the existing node within the tree against which
 * it is being compared.  This trait can be used (with care!) to
 * implement "most/least recently added" semantics between nodes which
 * would otherwise compare as equal.
 */
typedef bool (*rb_lessthan_t)(struct rbnode *a, struct rbnode *b);
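
/*
 * A minimal sketch of a comparison predicate for an intrusive
 * container.  The "struct my_item" type, its "key" field and the
 * "my_lessthan" name are hypothetical; CONTAINER_OF() is assumed to
 * come from <zephyr/sys/util.h>.
 *
 *	struct my_item {
 *		int key;
 *		struct rbnode node;
 *	};
 *
 *	static bool my_lessthan(struct rbnode *a, struct rbnode *b)
 *	{
 *		struct my_item *ia = CONTAINER_OF(a, struct my_item, node);
 *		struct my_item *ib = CONTAINER_OF(b, struct my_item, node);
 *
 *		return ia->key < ib->key;
 *	}
 */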

struct rbtree {
	struct rbnode *root;
	rb_lessthan_t lessthan_fn;
	int max_depth;
#ifdef CONFIG_MISRA_SANE
	struct rbnode *iter_stack[Z_MAX_RBTREE_DEPTH];
	unsigned char iter_left[Z_MAX_RBTREE_DEPTH];
#endif
};

typedef void (*rb_visit_t)(struct rbnode *node, void *cookie);

struct rbnode *z_rb_child(struct rbnode *node, uint8_t side);
int z_rb_is_black(struct rbnode *node);
#ifndef CONFIG_MISRA_SANE
void z_rb_walk(struct rbnode *node, rb_visit_t visit_fn, void *cookie);
#endif
struct rbnode *z_rb_get_minmax(struct rbtree *tree, uint8_t side);

/**
 * @brief Insert node into tree
 */
void rb_insert(struct rbtree *tree, struct rbnode *node);

/**
 * @brief Remove node from tree
 */
void rb_remove(struct rbtree *tree, struct rbnode *node);
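
/*
 * Usage sketch (illustrative, not normative): a zero-initialized tree
 * with a lessthan callback set is ready for use.  "my_lessthan" and
 * "struct my_item" are the hypothetical names from the sketch above.
 *
 *	struct rbtree tree = { .lessthan_fn = my_lessthan };
 *	struct my_item item = { .key = 42 };
 *
 *	rb_insert(&tree, &item.node);
 *	rb_remove(&tree, &item.node);
 */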

/**
 * @brief Returns the lowest-sorted member of the tree
 */
static inline struct rbnode *rb_get_min(struct rbtree *tree)
{
	return z_rb_get_minmax(tree, 0U);
}

/**
 * @brief Returns the highest-sorted member of the tree
 */
static inline struct rbnode *rb_get_max(struct rbtree *tree)
{
	return z_rb_get_minmax(tree, 1U);
}

/**
 * @brief Returns true if the given node is part of the tree
 *
 * Note that this does not internally dereference the node pointer
 * (though the tree's lessthan callback might!); it just tests it for
 * equality with items in the tree.  So it's feasible to use this to
 * implement a "set" construct by simply testing the pointer value
 * itself.
 */
bool rb_contains(struct rbtree *tree, struct rbnode *node);
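
/*
 * Because membership is tested by pointer identity, a bare "set of
 * nodes" needs no key comparison of its own (sketch, reusing the
 * hypothetical tree/item from above):
 *
 *	bool is_member = rb_contains(&tree, &item.node);
 */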

#ifndef CONFIG_MISRA_SANE
/**
 * @brief Walk/enumerate a rbtree
 *
 * Very simple recursive enumeration.  Low code size, but requiring a
 * separate callback function can be clumsy for the user, and there is
 * no way to break out of the loop early.  See RB_FOR_EACH for an
 * iterative implementation.
 */
static inline void rb_walk(struct rbtree *tree, rb_visit_t visit_fn,
			   void *cookie)
{
	z_rb_walk(tree->root, visit_fn, cookie);
}
#endif
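
/*
 * A minimal visit callback sketch; the cookie carries arbitrary user
 * state, here a counter ("count_visit" is a hypothetical name, and
 * ARG_UNUSED() is assumed from <zephyr/sys/util.h>):
 *
 *	static void count_visit(struct rbnode *node, void *cookie)
 *	{
 *		int *count = cookie;
 *
 *		ARG_UNUSED(node);
 *		(*count)++;
 *	}
 *
 *	int count = 0;
 *
 *	rb_walk(&tree, count_visit, &count);
 */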

struct _rb_foreach {
	struct rbnode **stack;
	uint8_t *is_left;
	int32_t top;
};

#ifdef CONFIG_MISRA_SANE
#define _RB_FOREACH_INIT(tree, node) {					\
	.stack   = &(tree)->iter_stack[0],				\
	.is_left = &(tree)->iter_left[0],				\
	.top     = -1							\
}
#else
#define _RB_FOREACH_INIT(tree, node) {					\
	.stack   = (struct rbnode **)					\
			alloca((tree)->max_depth * sizeof(struct rbnode *)), \
	.is_left = (uint8_t *)alloca((tree)->max_depth * sizeof(uint8_t)),\
	.top     = -1							\
}
#endif

struct rbnode *z_rb_foreach_next(struct rbtree *tree, struct _rb_foreach *f);

/**
 * @brief Walk a tree in-order without recursing
 *
 * While @ref rb_walk() is very simple, recursing on the C stack can
 * be clumsy for some purposes and on some architectures wastes
 * significant memory in stack frames.  This macro implements a
 * non-recursive "foreach" loop that can iterate directly on the tree,
 * at a moderate cost in code size.
 *
 * Note that the resulting loop is not safe against modifications to
 * the tree.  Changes to the tree structure during the loop will
 * produce incorrect results, as nodes may be skipped or duplicated.
 * Unlike linked lists, no _SAFE variant exists.
 *
 * Note also that the macro expands its arguments multiple times, so
 * they should not be expressions with side effects.
 *
 * @param tree A pointer to a struct rbtree to walk
 * @param node The symbol name of a local struct rbnode* variable to
 *             use as the iterator
 */
#define RB_FOR_EACH(tree, node) \
	for (struct _rb_foreach __f = _RB_FOREACH_INIT(tree, node);	\
	     (node = z_rb_foreach_next(tree, &__f));			\
	     /**/)
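
/*
 * Iteration sketch: "n" is a plain struct rbnode pointer declared by
 * the caller, and unlike rb_walk() the loop can terminate early
 * (again using the hypothetical "struct my_item" container):
 *
 *	struct rbnode *n;
 *
 *	RB_FOR_EACH(&tree, n) {
 *		struct my_item *item = CONTAINER_OF(n, struct my_item, node);
 *
 *		if (item->key == 42) {
 *			break;
 *		}
 *	}
 */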

/**
 * @brief Loop over rbtree with implicit container field logic
 *
 * As for RB_FOR_EACH(), but "node" can have an arbitrary type
 * containing a struct rbnode.
 *
 * @param tree A pointer to a struct rbtree to walk
 * @param node The symbol name of a local iterator
 * @param field The field name of a struct rbnode inside node
 */
#define RB_FOR_EACH_CONTAINER(tree, node, field)		           \
	for (struct _rb_foreach __f = _RB_FOREACH_INIT(tree, node);	   \
			({struct rbnode *n = z_rb_foreach_next(tree, &__f); \
			 node = n ? CONTAINER_OF(n, __typeof__(*(node)),   \
					 field) : NULL; }) != NULL;        \
			 /**/)
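
/*
 * Container-aware iteration sketch, again assuming the hypothetical
 * "struct my_item" with its embedded "node" field (printk() from
 * <zephyr/sys/printk.h>):
 *
 *	struct my_item *item;
 *
 *	RB_FOR_EACH_CONTAINER(&tree, item, node) {
 *		printk("key = %d\n", item->key);
 *	}
 */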

/** @} */

#endif /* ZEPHYR_INCLUDE_SYS_RB_H_ */