Lines matching refs:rb in drivers/gpu/drm/drm_mm.c (the DRM range allocator)

155 INTERVAL_TREE_DEFINE(struct drm_mm_node, rb,  in INTERVAL_TREE_DEFINE()  argument
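
Line 155 instantiates the generic augmented interval tree from <linux/interval_tree_generic.h> on struct drm_mm_node's rb field; the remaining macro arguments are cut off in the listing. A minimal sketch of how such an instantiation looks, using an assumed demo type rather than the real drm_mm parameters:

#include <linux/interval_tree_generic.h>
#include <linux/rbtree.h>
#include <linux/types.h>

/* Illustrative interval node; not struct drm_mm_node. */
struct demo_interval {
        struct rb_node rb;
        u64 start;
        u64 last;
        u64 __subtree_last;     /* maintained by the generated code */
};

#define DEMO_START(n) ((n)->start)
#define DEMO_LAST(n)  ((n)->last)

/*
 * Generates demo_it_insert(), demo_it_remove(), demo_it_iter_first() and
 * demo_it_iter_next() over a struct rb_root_cached, keeping __subtree_last
 * equal to the maximum DEMO_LAST() value in each subtree.
 */
INTERVAL_TREE_DEFINE(struct demo_interval, rb, u64, __subtree_last,
                     DEMO_START, DEMO_LAST, static, demo_it)
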
171 struct rb_node **link, *rb; in drm_mm_interval_tree_add_node() local
178 rb = &hole_node->rb; in drm_mm_interval_tree_add_node()
179 while (rb) { in drm_mm_interval_tree_add_node()
180 parent = rb_entry(rb, struct drm_mm_node, rb); in drm_mm_interval_tree_add_node()
185 rb = rb_parent(rb); in drm_mm_interval_tree_add_node()
188 rb = &hole_node->rb; in drm_mm_interval_tree_add_node()
189 link = &hole_node->rb.rb_right; in drm_mm_interval_tree_add_node()
192 rb = NULL; in drm_mm_interval_tree_add_node()
198 rb = *link; in drm_mm_interval_tree_add_node()
199 parent = rb_entry(rb, struct drm_mm_node, rb); in drm_mm_interval_tree_add_node()
203 link = &parent->rb.rb_left; in drm_mm_interval_tree_add_node()
205 link = &parent->rb.rb_right; in drm_mm_interval_tree_add_node()
210 rb_link_node(&node->rb, rb, link); in drm_mm_interval_tree_add_node()
211 rb_insert_augmented_cached(&node->rb, &mm->interval_tree, leftmost, in drm_mm_interval_tree_add_node()
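
Lines 171-211 are drm_mm_interval_tree_add_node(): when a hint node is available, the function first walks ancestors with rb_parent() to widen the augmented __subtree_last values, then descends to a leaf slot and finishes with rb_link_node() and rb_insert_augmented_cached(). The fragment below sketches just the upward fixup loop (lines 178-185) against an assumed node type; the augmented callbacks and the cached insert are left out.

#include <linux/rbtree.h>
#include <linux/types.h>

/* Illustrative augmented node; not struct drm_mm_node. */
struct demo_node {
        struct rb_node rb;
        u64 last;
        u64 subtree_last;       /* max ->last over this node's subtree */
};

/*
 * Propagate a grown interval end upward: stop at the first ancestor whose
 * cached maximum already covers @new_last (same shape as lines 178-185).
 */
static void demo_widen_subtree_last(struct demo_node *from, u64 new_last)
{
        struct rb_node *rb = &from->rb;

        while (rb) {
                struct demo_node *node = rb_entry(rb, struct demo_node, rb);

                if (node->subtree_last >= new_last)
                        break;

                node->subtree_last = new_last;
                rb = rb_parent(rb);
        }
}
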
216 struct rb_node **link = &root.rb_node, *rb = NULL; \
219 rb = *link; \
220 if (x < expr(rb_entry(rb, struct drm_mm_node, member))) \
221 link = &rb->rb_left; \
223 link = &rb->rb_right; \
225 rb_link_node(&node->member, rb, link); \
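
Lines 216-225 are the body of an rb-insert helper macro in drm_mm.c that files the calling function's local variable "node" into a plain rb_root ordered by an arbitrary key expression. A hedged reconstruction of that shape follows; the macro name and the closing rb_insert_color() call are assumptions, since the listing only shows the descent and the rb_link_node() step.

/*
 * Sketch of the keyed-insert macro shape: relies on a struct drm_mm_node
 * named "node" in the calling scope, exactly like the fragment above. The
 * name KEYED_RB_INSERT and the final rb_insert_color() are assumed.
 */
#define KEYED_RB_INSERT(root, member, expr) do { \
        struct rb_node **link = &(root).rb_node, *rb = NULL; \
        u64 x = expr(node); \
\
        while (*link) { \
                rb = *link; \
                if (x < expr(rb_entry(rb, struct drm_mm_node, member))) \
                        link = &rb->rb_left; \
                else \
                        link = &rb->rb_right; \
        } \
\
        rb_link_node(&node->member, rb, link); \
        rb_insert_color(&node->member, &(root)); \
} while (0)
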
232 static u64 rb_to_hole_size(struct rb_node *rb) in rb_to_hole_size() argument
234 return rb_entry(rb, struct drm_mm_node, rb_hole_size)->hole_size; in rb_to_hole_size()
240 struct rb_node **link = &root->rb_root.rb_node, *rb = NULL; in insert_hole_size() local
245 rb = *link; in insert_hole_size()
246 if (x > rb_to_hole_size(rb)) { in insert_hole_size()
247 link = &rb->rb_left; in insert_hole_size()
249 link = &rb->rb_right; in insert_hole_size()
254 rb_link_node(&node->rb_hole_size, rb, link); in insert_hole_size()
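
insert_hole_size() (lines 240-254) files a node into a cached rbtree keyed on hole_size in descending order, so the cached leftmost entry is always the largest hole. Below is a minimal self-contained sketch of the same ordering, with an assumed struct hole instead of struct drm_mm_node and the trailing rb_insert_color_cached() call assumed from the standard cached-rbtree pattern.

#include <linux/rbtree.h>
#include <linux/types.h>

/* Illustrative type standing in for struct drm_mm_node. */
struct hole {
        struct rb_node rb_size;
        u64 size;
};

/*
 * Larger holes go left, so rb_first_cached() (the cached leftmost node)
 * is always the biggest free hole.
 */
static void hole_insert_by_size(struct rb_root_cached *root, struct hole *hole)
{
        struct rb_node **link = &root->rb_root.rb_node, *rb = NULL;
        u64 x = hole->size;
        bool first = true;

        while (*link) {
                rb = *link;
                if (x > rb_entry(rb, struct hole, rb_size)->size) {
                        link = &rb->rb_left;
                } else {
                        link = &rb->rb_right;
                        first = false;
                }
        }

        rb_link_node(&hole->rb_size, rb, link);
        rb_insert_color_cached(&hole->rb_size, root, first);
}
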
284 static inline struct drm_mm_node *rb_hole_size_to_node(struct rb_node *rb) in rb_hole_size_to_node() argument
286 return rb_entry_safe(rb, struct drm_mm_node, rb_hole_size); in rb_hole_size_to_node()
289 static inline struct drm_mm_node *rb_hole_addr_to_node(struct rb_node *rb) in rb_hole_addr_to_node() argument
291 return rb_entry_safe(rb, struct drm_mm_node, rb_hole_addr); in rb_hole_addr_to_node()
294 static inline u64 rb_hole_size(struct rb_node *rb) in rb_hole_size() argument
296 return rb_entry(rb, struct drm_mm_node, rb_hole_size)->hole_size; in rb_hole_size()
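
The helpers at lines 284-296 differ mainly in NULL handling: rb_entry() assumes the rb_node pointer is valid, while rb_entry_safe() maps a NULL pointer to a NULL container, which lets callers pass in tree lookups that may come back empty. A small sketch with an assumed type:

#include <linux/rbtree.h>

struct demo_node {
        struct rb_node rb;
};

/* NULL-tolerant: returns NULL when @rb is NULL, e.g. for an empty tree. */
static struct demo_node *demo_to_node(struct rb_node *rb)
{
        return rb_entry_safe(rb, struct demo_node, rb);
}

/* Typical use: rb_first() returns NULL on an empty tree. */
static struct demo_node *demo_first(struct rb_root *root)
{
        return demo_to_node(rb_first(root));
}
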
301 struct rb_node *rb = mm->holes_size.rb_root.rb_node; in best_hole() local
306 rb_entry(rb, struct drm_mm_node, rb_hole_size); in best_hole()
310 rb = rb->rb_right; in best_hole()
312 rb = rb->rb_left; in best_hole()
314 } while (rb); in best_hole()
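
best_hole() (lines 301-314) does a best-fit search over the size-sorted tree: whenever the current hole is big enough it is remembered and the search continues to the right, where holes are smaller, looking for a tighter fit. Here is a self-contained sketch of the same descent; the struct mirrors the insert_hole_size sketch above, and a plain while loop is used so an empty tree is also handled.

#include <linux/rbtree.h>
#include <linux/types.h>

struct hole {
        struct rb_node rb_size;
        u64 size;
};

/* Best fit over a tree ordered largest-to-smallest from left to right. */
static struct hole *hole_best_fit(struct rb_root_cached *root, u64 size)
{
        struct rb_node *rb = root->rb_root.rb_node;
        struct hole *best = NULL;

        while (rb) {
                struct hole *h = rb_entry(rb, struct hole, rb_size);

                if (size <= h->size) {
                        /* Fits: remember it, then look right for a tighter fit. */
                        best = h;
                        rb = rb->rb_right;
                } else {
                        /* Too small: only the larger holes to the left can fit. */
                        rb = rb->rb_left;
                }
        }

        return best;
}
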
321 struct rb_node *rb = mm->holes_addr.rb_node; in find_hole() local
324 while (rb) { in find_hole()
327 node = rb_hole_addr_to_node(rb); in find_hole()
331 rb = node->rb_hole_addr.rb_left; in find_hole()
333 rb = node->rb_hole_addr.rb_right; in find_hole()
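
find_hole() (lines 321-333) walks the address-sorted holes_addr tree: it descends left when the target address is below the hole's start and right when it is past the hole's end, stopping when the address lands inside a hole. A self-contained sketch with an assumed node layout; like the original loop, it returns the last node visited, so a caller still has to check that the address really falls inside it.

#include <linux/rbtree.h>
#include <linux/types.h>

/* Illustrative node; drm_mm.c keys this tree on the hole start address. */
struct addr_hole {
        struct rb_node rb_addr;
        u64 start;
        u64 size;
};

static struct addr_hole *addr_hole_lookup(struct rb_root *root, u64 addr)
{
        struct rb_node *rb = root->rb_node;
        struct addr_hole *node = NULL;

        while (rb) {
                node = rb_entry(rb, struct addr_hole, rb_addr);

                if (addr < node->start)
                        rb = node->rb_addr.rb_left;
                else if (addr > node->start + node->size)
                        rb = node->rb_addr.rb_right;
                else
                        break;          /* addr is inside this hole */
        }

        return node;
}
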
443 static u64 rb_to_hole_size_or_zero(struct rb_node *rb) in rb_to_hole_size_or_zero() argument
445 return rb ? rb_to_hole_size(rb) : 0; in rb_to_hole_size_or_zero()
613 rb_replace_node_cached(&old->rb, &new->rb, &mm->interval_tree); in drm_mm_replace_node()
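
The last match, in drm_mm_replace_node() (line 613), swaps a new node into the old node's exact slot with rb_replace_node_cached(). That call does no rebalancing and no re-keying, so it is only valid when the replacement sorts identically to the node it replaces. A minimal sketch with an assumed type:

#include <linux/rbtree.h>

struct demo_node {
        struct rb_node rb;
};

/*
 * In-place replacement: @old's links and color are taken over by @new,
 * so @new must occupy the same ordering position (identical key) as @old.
 */
static void demo_replace(struct rb_root_cached *root,
                         struct demo_node *old, struct demo_node *new)
{
        rb_replace_node_cached(&old->rb, &new->rb, root);
}
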