Lines matching "fine-tune" in drivers/gpu/drm/drm_mm.c
21 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
37 * an RB-tree is used instead. At least if we expect heavy fragmentation.
42 * Thomas Hellström <thomas-at-tungstengraphics-dot-com>
88 * Two behaviors are supported for searching and allocating: bottom-up and
89 * top-down. The default is bottom-up. Top-down allocation can be used if the
95 * Note that this range allocator is not thread-safe; drivers need to protect
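As a minimal usage sketch (not part of this file; struct my_priv and its mm_lock are illustrative names), a driver typically embeds the allocator next to its own lock, since drm_mm itself does no locking:

    /* assumes <drm/drm_mm.h> and <linux/mutex.h> */
    struct my_priv {                        /* hypothetical driver-private state */
            struct drm_mm mm;
            struct mutex mm_lock;           /* drm_mm is not thread-safe */
    };

    static void my_mm_create(struct my_priv *priv, u64 start, u64 size)
    {
            mutex_init(&priv->mm_lock);
            drm_mm_init(&priv->mm, start, size);
    }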
115 node->stack = stack_depot_save(entries, n, GFP_NOWAIT); in save_stack()
130 if (!node->stack) { in show_leaks()
132 node->start, node->size); in show_leaks()
136 nr_entries = stack_depot_fetch(node->stack, &entries); in show_leaks()
139 node->start, node->size, buf); in show_leaks()
152 #define START(node) ((node)->start)
153 #define LAST(node) ((node)->start + (node)->size - 1)
162 return drm_mm_interval_tree_iter_first((struct rb_root_cached *)&mm->interval_tree, in INTERVAL_TREE_DEFINE()
163 start, last) ?: (struct drm_mm_node *)&mm->head_node; in INTERVAL_TREE_DEFINE()
170 struct drm_mm *mm = hole_node->mm; in drm_mm_interval_tree_add_node()
175 node->__subtree_last = LAST(node); in drm_mm_interval_tree_add_node()
178 rb = &hole_node->rb; in drm_mm_interval_tree_add_node()
181 if (parent->__subtree_last >= node->__subtree_last) in drm_mm_interval_tree_add_node()
184 parent->__subtree_last = node->__subtree_last; in drm_mm_interval_tree_add_node()
188 rb = &hole_node->rb; in drm_mm_interval_tree_add_node()
189 link = &hole_node->rb.rb_right; in drm_mm_interval_tree_add_node()
193 link = &mm->interval_tree.rb_root.rb_node; in drm_mm_interval_tree_add_node()
200 if (parent->__subtree_last < node->__subtree_last) in drm_mm_interval_tree_add_node()
201 parent->__subtree_last = node->__subtree_last; in drm_mm_interval_tree_add_node()
202 if (node->start < parent->start) { in drm_mm_interval_tree_add_node()
203 link = &parent->rb.rb_left; in drm_mm_interval_tree_add_node()
205 link = &parent->rb.rb_right; in drm_mm_interval_tree_add_node()
210 rb_link_node(&node->rb, rb, link); in drm_mm_interval_tree_add_node()
211 rb_insert_augmented_cached(&node->rb, &mm->interval_tree, leftmost, in drm_mm_interval_tree_add_node()
215 #define HOLE_SIZE(NODE) ((NODE)->hole_size)
220 return rb_entry(rb, struct drm_mm_node, rb_hole_size)->hole_size; in rb_to_hole_size()
226 struct rb_node **link = &root->rb_root.rb_node, *rb = NULL; in insert_hole_size()
227 u64 x = node->hole_size; in insert_hole_size()
233 link = &rb->rb_left; in insert_hole_size()
235 link = &rb->rb_right; in insert_hole_size()
240 rb_link_node(&node->rb_hole_size, rb, link); in insert_hole_size()
241 rb_insert_color_cached(&node->rb_hole_size, root, first); in insert_hole_size()
250 struct rb_node **link = &root->rb_node, *rb_parent = NULL; in RB_DECLARE_CALLBACKS_MAX()
251 u64 start = HOLE_ADDR(node), subtree_max_hole = node->subtree_max_hole; in RB_DECLARE_CALLBACKS_MAX()
257 if (parent->subtree_max_hole < subtree_max_hole) in RB_DECLARE_CALLBACKS_MAX()
258 parent->subtree_max_hole = subtree_max_hole; in RB_DECLARE_CALLBACKS_MAX()
260 link = &parent->rb_hole_addr.rb_left; in RB_DECLARE_CALLBACKS_MAX()
262 link = &parent->rb_hole_addr.rb_right; in RB_DECLARE_CALLBACKS_MAX()
265 rb_link_node(&node->rb_hole_addr, rb_parent, link); in RB_DECLARE_CALLBACKS_MAX()
266 rb_insert_augmented(&node->rb_hole_addr, root, &augment_callbacks); in RB_DECLARE_CALLBACKS_MAX()
271 struct drm_mm *mm = node->mm; in add_hole()
273 node->hole_size = in add_hole()
274 __drm_mm_hole_node_end(node) - __drm_mm_hole_node_start(node); in add_hole()
275 node->subtree_max_hole = node->hole_size; in add_hole()
278 insert_hole_size(&mm->holes_size, node); in add_hole()
279 insert_hole_addr(&mm->holes_addr, node); in add_hole()
281 list_add(&node->hole_stack, &mm->hole_stack); in add_hole()
288 list_del(&node->hole_stack); in rm_hole()
289 rb_erase_cached(&node->rb_hole_size, &node->mm->holes_size); in rm_hole()
290 rb_erase_augmented(&node->rb_hole_addr, &node->mm->holes_addr, in rm_hole()
292 node->hole_size = 0; in rm_hole()
293 node->subtree_max_hole = 0; in rm_hole()
310 struct rb_node *rb = mm->holes_size.rb_root.rb_node; in best_hole()
317 if (size <= node->hole_size) { in best_hole()
319 rb = rb->rb_right; in best_hole()
321 rb = rb->rb_left; in best_hole()
330 return rb && rb_hole_addr_to_node(rb)->subtree_max_hole >= size; in usable_hole_addr()
335 struct rb_node *rb = mm->holes_addr.rb_node; in find_hole_addr()
348 rb = node->rb_hole_addr.rb_left; in find_hole_addr()
349 else if (addr > hole_start + node->hole_size) in find_hole_addr()
350 rb = node->rb_hole_addr.rb_right; in find_hole_addr()
375 return list_first_entry_or_null(&mm->hole_stack, in first_hole()
382 * DECLARE_NEXT_HOLE_ADDR - macro to declare next hole functions
395 struct rb_node *parent, *node = &entry->rb_hole_addr; \
400 if (usable_hole_addr(node->first, size)) { \
401 node = node->first; \
402 while (usable_hole_addr(node->last, size)) \
403 node = node->last; \
407 while ((parent = rb_parent(node)) && node == parent->first) \
425 return rb_hole_size_to_node(rb_prev(&node->rb_hole_size)); in DECLARE_NEXT_HOLE_ADDR()
435 return &node->hole_stack == &mm->hole_stack ? NULL : node; in DECLARE_NEXT_HOLE_ADDR()
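For context, the upstream file is expected to instantiate the macro above once per scan direction, roughly as follows (a sketch; the two function names are taken from the kernel tree, not from the lines shown here):

    DECLARE_NEXT_HOLE_ADDR(next_hole_high_addr, rb_left, rb_right)
    DECLARE_NEXT_HOLE_ADDR(next_hole_low_addr, rb_right, rb_left)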
440 * drm_mm_reserve_node - insert a pre-initialized node
444 * This function inserts an already set-up &drm_mm_node into the allocator,
447 * preallocated objects which must be set up before the range allocator can be
448 * set up, e.g. when taking over a firmware framebuffer.
451 * 0 on success, -ENOSPC if there's no hole where @node is.
460 end = node->start + node->size; in drm_mm_reserve_node()
461 if (unlikely(end <= node->start)) in drm_mm_reserve_node()
462 return -ENOSPC; in drm_mm_reserve_node()
465 hole = find_hole_addr(mm, node->start, 0); in drm_mm_reserve_node()
467 return -ENOSPC; in drm_mm_reserve_node()
470 adj_end = hole_end = hole_start + hole->hole_size; in drm_mm_reserve_node()
472 if (mm->color_adjust) in drm_mm_reserve_node()
473 mm->color_adjust(hole, node->color, &adj_start, &adj_end); in drm_mm_reserve_node()
475 if (adj_start > node->start || adj_end < end) in drm_mm_reserve_node()
476 return -ENOSPC; in drm_mm_reserve_node()
478 node->mm = mm; in drm_mm_reserve_node()
480 __set_bit(DRM_MM_NODE_ALLOCATED_BIT, &node->flags); in drm_mm_reserve_node()
481 list_add(&node->node_list, &hole->node_list); in drm_mm_reserve_node()
483 node->hole_size = 0; in drm_mm_reserve_node()
486 if (node->start > hole_start) in drm_mm_reserve_node()
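A hedged caller sketch for drm_mm_reserve_node(), e.g. to carve out a firmware framebuffer; the start/size values are purely illustrative and would normally come from hardware or firmware, and my_priv is the hypothetical struct from the earlier sketch:

    static int my_reserve_fw_fb(struct my_priv *priv, struct drm_mm_node *node)
    {
            int ret;

            node->start = 0x100000;                 /* illustrative */
            node->size  = 8 * 1024 * 1024;          /* illustrative */
            node->color = 0;

            mutex_lock(&priv->mm_lock);
            ret = drm_mm_reserve_node(&priv->mm, node);     /* 0 or -ENOSPC */
            mutex_unlock(&priv->mm_lock);
            return ret;
    }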
502 * drm_mm_insert_node_in_range - ranged search for space and insert @node
510 * @mode: fine-tune the allocation search and placement
515 * 0 on success, -ENOSPC if there's no suitable hole.
530 if (unlikely(size == 0 || range_end - range_start < size)) in drm_mm_insert_node_in_range()
531 return -ENOSPC; in drm_mm_insert_node_in_range()
533 if (rb_to_hole_size_or_zero(rb_first_cached(&mm->holes_size)) < size) in drm_mm_insert_node_in_range()
534 return -ENOSPC; in drm_mm_insert_node_in_range()
542 remainder_mask = is_power_of_2(alignment) ? alignment - 1 : 0; in drm_mm_insert_node_in_range()
547 u64 hole_end = hole_start + hole->hole_size; in drm_mm_insert_node_in_range()
559 if (mm->color_adjust) in drm_mm_insert_node_in_range()
560 mm->color_adjust(hole, color, &col_start, &col_end); in drm_mm_insert_node_in_range()
565 if (adj_end <= adj_start || adj_end - adj_start < size) in drm_mm_insert_node_in_range()
569 adj_start = adj_end - size; in drm_mm_insert_node_in_range()
579 adj_start -= rem; in drm_mm_insert_node_in_range()
584 min(col_end, range_end) - adj_start < size) in drm_mm_insert_node_in_range()
588 adj_end - adj_start < size) in drm_mm_insert_node_in_range()
593 node->mm = mm; in drm_mm_insert_node_in_range()
594 node->size = size; in drm_mm_insert_node_in_range()
595 node->start = adj_start; in drm_mm_insert_node_in_range()
596 node->color = color; in drm_mm_insert_node_in_range()
597 node->hole_size = 0; in drm_mm_insert_node_in_range()
599 __set_bit(DRM_MM_NODE_ALLOCATED_BIT, &node->flags); in drm_mm_insert_node_in_range()
600 list_add(&node->node_list, &hole->node_list); in drm_mm_insert_node_in_range()
613 return -ENOSPC; in drm_mm_insert_node_in_range()
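A minimal caller sketch for the ranged insert, reusing the hypothetical my_priv lock from above; the 4 KiB alignment (SZ_4K from <linux/sizes.h>) and full-range bounds are assumptions, and drm_mm_insert_node() can be used instead for the simple whole-range, no-alignment case:

    static int my_alloc(struct my_priv *priv, struct drm_mm_node *node, u64 size)
    {
            int ret;

            mutex_lock(&priv->mm_lock);
            ret = drm_mm_insert_node_in_range(&priv->mm, node, size,
                                              SZ_4K,          /* alignment */
                                              0,              /* color */
                                              0, U64_MAX,     /* search range */
                                              DRM_MM_INSERT_BEST);
            mutex_unlock(&priv->mm_lock);
            return ret;     /* 0 or -ENOSPC */
    }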
619 return test_bit(DRM_MM_NODE_SCANNED_BIT, &node->flags); in drm_mm_node_scanned_block()
623 * drm_mm_remove_node - Remove a memory node from the allocator.
627 * be cleared again before it can be re-inserted into this or any other drm_mm
632 struct drm_mm *mm = node->mm; in drm_mm_remove_node()
643 drm_mm_interval_tree_remove(node, &mm->interval_tree); in drm_mm_remove_node()
644 list_del(&node->node_list); in drm_mm_remove_node()
650 clear_bit_unlock(DRM_MM_NODE_ALLOCATED_BIT, &node->flags); in drm_mm_remove_node()
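The matching teardown sketch; drm_mm_node_allocated() guards against nodes that were never (or are no longer) inserted:

    static void my_free(struct my_priv *priv, struct drm_mm_node *node)
    {
            mutex_lock(&priv->mm_lock);
            if (drm_mm_node_allocated(node))
                    drm_mm_remove_node(node);
            mutex_unlock(&priv->mm_lock);
    }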
655 * drm_mm_replace_node - move an allocation from @old to @new
665 struct drm_mm *mm = old->mm; in drm_mm_replace_node()
671 __set_bit(DRM_MM_NODE_ALLOCATED_BIT, &new->flags); in drm_mm_replace_node()
672 list_replace(&old->node_list, &new->node_list); in drm_mm_replace_node()
673 rb_replace_node_cached(&old->rb, &new->rb, &mm->interval_tree); in drm_mm_replace_node()
676 list_replace(&old->hole_stack, &new->hole_stack); in drm_mm_replace_node()
677 rb_replace_node_cached(&old->rb_hole_size, in drm_mm_replace_node()
678 &new->rb_hole_size, in drm_mm_replace_node()
679 &mm->holes_size); in drm_mm_replace_node()
680 rb_replace_node(&old->rb_hole_addr, in drm_mm_replace_node()
681 &new->rb_hole_addr, in drm_mm_replace_node()
682 &mm->holes_addr); in drm_mm_replace_node()
685 clear_bit_unlock(DRM_MM_NODE_ALLOCATED_BIT, &old->flags); in drm_mm_replace_node()
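A sketch of handing the same allocated range over to a new embedding object, e.g. when an object is reallocated but should keep its placement (my_priv and the lock are the same illustrative names as above):

    static void my_transfer_node(struct my_priv *priv,
                                 struct drm_mm_node *old, struct drm_mm_node *new)
    {
            mutex_lock(&priv->mm_lock);
            drm_mm_replace_node(old, new);  /* @old becomes unallocated */
            mutex_unlock(&priv->mm_lock);
    }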
699 * The DRM range allocator supports this use-case through the scanning
722 * drm_mm_scan_init_with_range - initialize range-restricted lru scanning
730 * @mode: fine-tune the allocation search and placement
736 * As long as the scan list is non-empty, no other operations than
749 DRM_MM_BUG_ON(!size || size > end - start); in drm_mm_scan_init_with_range()
750 DRM_MM_BUG_ON(mm->scan_active); in drm_mm_scan_init_with_range()
752 scan->mm = mm; in drm_mm_scan_init_with_range()
757 scan->color = color; in drm_mm_scan_init_with_range()
758 scan->alignment = alignment; in drm_mm_scan_init_with_range()
759 scan->remainder_mask = is_power_of_2(alignment) ? alignment - 1 : 0; in drm_mm_scan_init_with_range()
760 scan->size = size; in drm_mm_scan_init_with_range()
761 scan->mode = mode; in drm_mm_scan_init_with_range()
764 scan->range_start = start; in drm_mm_scan_init_with_range()
765 scan->range_end = end; in drm_mm_scan_init_with_range()
767 scan->hit_start = U64_MAX; in drm_mm_scan_init_with_range()
768 scan->hit_end = 0; in drm_mm_scan_init_with_range()
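A sketch of setting up a range-restricted eviction scan, continuing the hypothetical my_priv example; size, evict_start and evict_end are assumed locals:

    struct drm_mm_scan scan;

    drm_mm_scan_init_with_range(&scan, &priv->mm,
                                size, SZ_4K, 0,             /* size, alignment, color */
                                evict_start, evict_end,     /* only scan inside this range */
                                DRM_MM_INSERT_EVICT);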
773 * drm_mm_scan_add_block - add a node to the scan list
786 struct drm_mm *mm = scan->mm; in drm_mm_scan_add_block()
792 DRM_MM_BUG_ON(node->mm != mm); in drm_mm_scan_add_block()
795 __set_bit(DRM_MM_NODE_SCANNED_BIT, &node->flags); in drm_mm_scan_add_block()
796 mm->scan_active++; in drm_mm_scan_add_block()
805 __list_del_entry(&node->node_list); in drm_mm_scan_add_block()
812 if (mm->color_adjust) in drm_mm_scan_add_block()
813 mm->color_adjust(hole, scan->color, &col_start, &col_end); in drm_mm_scan_add_block()
815 adj_start = max(col_start, scan->range_start); in drm_mm_scan_add_block()
816 adj_end = min(col_end, scan->range_end); in drm_mm_scan_add_block()
817 if (adj_end <= adj_start || adj_end - adj_start < scan->size) in drm_mm_scan_add_block()
820 if (scan->mode == DRM_MM_INSERT_HIGH) in drm_mm_scan_add_block()
821 adj_start = adj_end - scan->size; in drm_mm_scan_add_block()
823 if (scan->alignment) { in drm_mm_scan_add_block()
826 if (likely(scan->remainder_mask)) in drm_mm_scan_add_block()
827 rem = adj_start & scan->remainder_mask; in drm_mm_scan_add_block()
829 div64_u64_rem(adj_start, scan->alignment, &rem); in drm_mm_scan_add_block()
831 adj_start -= rem; in drm_mm_scan_add_block()
832 if (scan->mode != DRM_MM_INSERT_HIGH) in drm_mm_scan_add_block()
833 adj_start += scan->alignment; in drm_mm_scan_add_block()
834 if (adj_start < max(col_start, scan->range_start) || in drm_mm_scan_add_block()
835 min(col_end, scan->range_end) - adj_start < scan->size) in drm_mm_scan_add_block()
839 adj_end - adj_start < scan->size) in drm_mm_scan_add_block()
844 scan->hit_start = adj_start; in drm_mm_scan_add_block()
845 scan->hit_end = adj_start + scan->size; in drm_mm_scan_add_block()
847 DRM_MM_BUG_ON(scan->hit_start >= scan->hit_end); in drm_mm_scan_add_block()
848 DRM_MM_BUG_ON(scan->hit_start < hole_start); in drm_mm_scan_add_block()
849 DRM_MM_BUG_ON(scan->hit_end > hole_end); in drm_mm_scan_add_block()
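Continuing the sketch: least-recently-used objects are fed to the scan until drm_mm_scan_add_block() reports that a large enough hole could be assembled. The struct my_obj (with node, lru_link and scan_link members) and the priv->lru list are assumptions for illustration:

    LIST_HEAD(scan_list);   /* everything currently added to the scan */
    struct my_obj *obj, *tmp;
    bool found = false;

    list_for_each_entry(obj, &priv->lru, lru_link) {
            list_add(&obj->scan_link, &scan_list);  /* remember the scanned blocks */
            if (drm_mm_scan_add_block(&scan, &obj->node)) {
                    found = true;   /* a suitable hole has been assembled */
                    break;
            }
    }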
856 * drm_mm_scan_remove_block - remove a node from the scan list
879 DRM_MM_BUG_ON(node->mm != scan->mm); in drm_mm_scan_remove_block()
881 __clear_bit(DRM_MM_NODE_SCANNED_BIT, &node->flags); in drm_mm_scan_remove_block()
883 DRM_MM_BUG_ON(!node->mm->scan_active); in drm_mm_scan_remove_block()
884 node->mm->scan_active--; in drm_mm_scan_remove_block()
890 * backwards correctly we check that prev_node->next == node->next, in drm_mm_scan_remove_block()
897 list_add(&node->node_list, &prev_node->node_list); in drm_mm_scan_remove_block()
899 return (node->start + node->size > scan->hit_start && in drm_mm_scan_remove_block()
900 node->start < scan->hit_end); in drm_mm_scan_remove_block()
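Still continuing the sketch: every scanned block must be removed again before anything else may touch the allocator. Because scan_list was built with list_add(), walking it forward visits the blocks in reverse order of addition; only the blocks that report true (and only if a hole was actually found) are kept as eviction candidates, and real eviction happens once the scan list is empty (my_evict_obj() is a placeholder that ends in drm_mm_remove_node()):

    list_for_each_entry_safe(obj, tmp, &scan_list, scan_link) {
            if (drm_mm_scan_remove_block(&scan, &obj->node) && found)
                    continue;               /* keep as an eviction candidate */
            list_del_init(&obj->scan_link);
    }

    list_for_each_entry_safe(obj, tmp, &scan_list, scan_link) {
            list_del_init(&obj->scan_link);
            my_evict_obj(obj);              /* hypothetical; frees the node */
    }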
905 * drm_mm_scan_color_evict - evict overlapping nodes on either side of hole
917 struct drm_mm *mm = scan->mm; in drm_mm_scan_color_evict()
921 DRM_MM_BUG_ON(list_empty(&mm->hole_stack)); in drm_mm_scan_color_evict()
923 if (!mm->color_adjust) in drm_mm_scan_color_evict()
928 * in the hole_stack list, but due to side-effects in the driver it in drm_mm_scan_color_evict()
931 list_for_each_entry(hole, &mm->hole_stack, hole_stack) { in drm_mm_scan_color_evict()
933 hole_end = hole_start + hole->hole_size; in drm_mm_scan_color_evict()
935 if (hole_start <= scan->hit_start && in drm_mm_scan_color_evict()
936 hole_end >= scan->hit_end) in drm_mm_scan_color_evict()
941 DRM_MM_BUG_ON(&hole->hole_stack == &mm->hole_stack); in drm_mm_scan_color_evict()
942 if (unlikely(&hole->hole_stack == &mm->hole_stack)) in drm_mm_scan_color_evict()
945 DRM_MM_BUG_ON(hole_start > scan->hit_start); in drm_mm_scan_color_evict()
946 DRM_MM_BUG_ON(hole_end < scan->hit_end); in drm_mm_scan_color_evict()
948 mm->color_adjust(hole, scan->color, &hole_start, &hole_end); in drm_mm_scan_color_evict()
949 if (hole_start > scan->hit_start) in drm_mm_scan_color_evict()
951 if (hole_end < scan->hit_end) in drm_mm_scan_color_evict()
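When a color_adjust callback is installed, one bordering node may have to be evicted as well; a short follow-up sketch using the same illustrative helpers:

    struct drm_mm_node *leftover = drm_mm_scan_color_evict(&scan);

    if (leftover) {
            struct my_obj *extra = container_of(leftover, struct my_obj, node);

            my_evict_obj(extra);
    }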
959 * drm_mm_init - initialize a drm-mm allocator
970 mm->color_adjust = NULL; in drm_mm_init()
972 INIT_LIST_HEAD(&mm->hole_stack); in drm_mm_init()
973 mm->interval_tree = RB_ROOT_CACHED; in drm_mm_init()
974 mm->holes_size = RB_ROOT_CACHED; in drm_mm_init()
975 mm->holes_addr = RB_ROOT; in drm_mm_init()
978 INIT_LIST_HEAD(&mm->head_node.node_list); in drm_mm_init()
979 mm->head_node.flags = 0; in drm_mm_init()
980 mm->head_node.mm = mm; in drm_mm_init()
981 mm->head_node.start = start + size; in drm_mm_init()
982 mm->head_node.size = -size; in drm_mm_init()
983 add_hole(&mm->head_node); in drm_mm_init()
985 mm->scan_active = 0; in drm_mm_init()
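A worked example of the head_node trick above, assuming drm_mm_init(mm, 0x1000, 0x9000): head_node.start becomes 0xA000 and head_node.size becomes -0x9000 as an unsigned 64-bit value, so the hole behind the head node starts at 0xA000 + (-0x9000), which wraps around to 0x1000, and ends at the next node in the circular list, i.e. the head node itself at 0xA000. The single initial hole therefore covers exactly the managed range [0x1000, 0xA000) without any special-casing of an empty allocator.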
990 * drm_mm_takedown - clean up a drm_mm allocator
1008 size = entry->hole_size; in drm_mm_dump_hole()
1011 drm_printf(p, "%#018llx-%#018llx: %llu: free\n", in drm_mm_dump_hole()
1018 * drm_mm_print - print allocator state
1027 total_free += drm_mm_dump_hole(p, &mm->head_node); in drm_mm_print()
1030 drm_printf(p, "%#018llx-%#018llx: %llu: used\n", entry->start, in drm_mm_print()
1031 entry->start + entry->size, entry->size); in drm_mm_print()
1032 total_used += entry->size; in drm_mm_print()
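A common consumer sketch for drm_mm_print(), e.g. from a debugfs show callback; the seq_file wiring and the my_priv pointer are illustrative:

    static int my_mm_show(struct seq_file *m, void *data)
    {
            struct my_priv *priv = m->private;
            struct drm_printer p = drm_seq_file_printer(m);

            mutex_lock(&priv->mm_lock);
            drm_mm_print(&priv->mm, &p);
            mutex_unlock(&priv->mm_lock);

            return 0;
    }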