Lines matching refs: wr_mas
All hits are uses of struct ma_wr_state (the maple tree write state) in lib/maple_tree.c, grouped below by containing function; the number on each line is its line number in that file.

In mas_store_b_node():
  2110  static inline void mas_store_b_node(struct ma_wr_state *wr_mas,
  2117  struct ma_state *mas = wr_mas->mas;
  2119  b_node->type = wr_mas->type;
  2132  b_node->slot[b_end] = wr_mas->content;
  2133  if (!wr_mas->content)
  2140  b_node->slot[b_end] = wr_mas->entry;
  2148  piv = mas_logical_pivot(mas, wr_mas->pivots, offset_end, wr_mas->type);
  2151  mas_bulk_rebalance(mas, b_node->b_end, wr_mas->type);
  2154  wr_mas->content = mas_slot_locked(mas, wr_mas->slots,
  2157  b_node->slot[++b_end] = wr_mas->content;
  2158  if (!wr_mas->content)
  2164  if (slot > wr_mas->node_end)
  2168  mas_mab_cp(mas, slot, wr_mas->node_end + 1, b_node, ++b_end);

In mas_wr_node_walk():
  2244  static inline void mas_wr_node_walk(struct ma_wr_state *wr_mas)
  2246  struct ma_state *mas = wr_mas->mas;
  2251  if (unlikely(ma_is_dense(wr_mas->type))) {
  2252  wr_mas->r_max = wr_mas->r_min = mas->index;
  2257  wr_mas->node = mas_mn(wr_mas->mas);
  2258  wr_mas->pivots = ma_pivots(wr_mas->node, wr_mas->type);
  2259  count = wr_mas->node_end = ma_data_end(wr_mas->node, wr_mas->type,
  2260  wr_mas->pivots, mas->max);
  2262  min = mas_safe_min(mas, wr_mas->pivots, offset);
  2266  max = wr_mas->pivots[offset];
  2276  max = wr_mas->pivots[offset];
  2288  wr_mas->r_max = max;
  2289  wr_mas->r_min = min;
  2290  wr_mas->offset_end = mas->offset = offset;
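
mas_wr_node_walk() locates the slot within one node that covers mas->index and records its range as [r_min, r_max]. A minimal userspace model of that pivot walk, assuming the simplified node layout below (an illustration, not the kernel code; the real walk also handles dense nodes and node metadata):

    #include <limits.h>
    #include <stdio.h>

    /* Simplified leaf model: slot i covers [previous pivot + 1, pivots[i]];
     * slot 0 starts at the node minimum, the last slot ends at the node
     * maximum. */
    struct node_model {
            unsigned long pivots[15];
            unsigned char end;          /* last used offset ("node_end") */
            unsigned long min, max;     /* range covered by this node */
    };

    static unsigned char node_walk(const struct node_model *n,
                                   unsigned long index,
                                   unsigned long *r_min, unsigned long *r_max)
    {
            unsigned long min = n->min;
            unsigned char offset = 0;

            while (offset < n->end && n->pivots[offset] < index) {
                    min = n->pivots[offset] + 1;
                    offset++;
            }
            *r_min = min;
            *r_max = offset < n->end ? n->pivots[offset] : n->max;
            return offset;
    }

    int main(void)
    {
            struct node_model n = { .pivots = { 10, 20, 35 }, .end = 3,
                                    .min = 0, .max = ULONG_MAX };
            unsigned long r_min, r_max;
            unsigned char offset = node_walk(&n, 25, &r_min, &r_max);

            /* index 25 lands in slot 2, range [21, 35] */
            printf("slot %u, [%lu, %lu]\n", offset, r_min, r_max);
            return 0;
    }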

In mast_topiary():
  2325  MA_WR_STATE(wr_mas, mast->orig_l, NULL);
  2330  wr_mas.type = mte_node_type(mast->orig_l->node);
  2332  mas_wr_node_walk(&wr_mas);

In mast_ascend_free():
  2540  MA_WR_STATE(wr_mas, mast->orig_r, NULL);
  2560  wr_mas.type = mte_node_type(mast->orig_r->node);
  2561  mas_wr_node_walk(&wr_mas);
  2565  wr_mas.mas = mast->orig_l;
  2566  wr_mas.type = mte_node_type(mast->orig_l->node);
  2567  mas_wr_node_walk(&wr_mas);
  2569  mast->bn->type = wr_mas.type;

In mas_reuse_node():
  3558  static inline bool mas_reuse_node(struct ma_wr_state *wr_mas,
  3562  if (mt_in_rcu(wr_mas->mas->tree))
  3566  int clear = mt_slots[wr_mas->type] - bn->b_end;
  3568  memset(wr_mas->slots + bn->b_end, 0, sizeof(void *) * clear--);
  3569  memset(wr_mas->pivots + bn->b_end, 0, sizeof(void *) * clear);
  3571  mab_mas_cp(bn, 0, bn->b_end, wr_mas->mas, false);

In mas_commit_b_node():
  3581  static inline int mas_commit_b_node(struct ma_wr_state *wr_mas,
  3589  (!mte_is_root(wr_mas->mas->node)) &&
  3590  (mas_mt_height(wr_mas->mas) > 1))
  3591  return mas_rebalance(wr_mas->mas, b_node);
  3594  return mas_split(wr_mas->mas, b_node);
  3596  if (mas_reuse_node(wr_mas, b_node, end))
  3599  mas_node_count(wr_mas->mas, 1);
  3600  if (mas_is_err(wr_mas->mas))
  3603  node = mas_pop_node(wr_mas->mas);
  3604  node->parent = mas_mn(wr_mas->mas)->parent;
  3605  wr_mas->mas->node = mt_mk_node(node, b_type);
  3606  mab_mas_cp(b_node, 0, b_end, wr_mas->mas, false);
  3607  mas_replace(wr_mas->mas, false);
  3609  mas_update_gap(wr_mas->mas);
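
mas_commit_b_node() writes the accumulated big node back into the tree, choosing the cheapest viable strategy. A hedged sketch of the decision order implied by the listing above (thresholds simplified; the real code also special-cases root nodes and bulk operations):

    enum commit_action { REBALANCE, SPLIT, REUSE, REPLACE };

    /* b_end: fill level of the big node; min_slots/max_slots come from
     * mt_min_slots[] and mt_slots[] for the node type. */
    static enum commit_action pick_commit(unsigned char b_end,
                                          unsigned char min_slots,
                                          unsigned char max_slots,
                                          int is_root, int in_rcu)
    {
            if (b_end < min_slots && !is_root)
                    return REBALANCE;   /* too empty: merge with a sibling */
            if (b_end >= max_slots)
                    return SPLIT;       /* too full: split into two nodes */
            if (!in_rcu)
                    return REUSE;       /* overwrite the node in place */
            return REPLACE;             /* RCU readers: copy to a new node */
    }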

In mas_is_span_wr():
  3686  static bool mas_is_span_wr(struct ma_wr_state *wr_mas)
  3689  unsigned long last = wr_mas->mas->last;
  3690  unsigned long piv = wr_mas->r_max;
  3691  enum maple_type type = wr_mas->type;
  3692  void *entry = wr_mas->entry;
  3698  max = wr_mas->mas->max;
  3723  trace_ma_write(__func__, wr_mas->mas, piv, entry);
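
mas_is_span_wr() decides whether the write extends past everything the current node covers; only then is the expensive spanning-store path taken. The core test, as a model (the kernel version additionally special-cases NULL writes that end on a node boundary):

    /* A write over [index, last] spans the node when last is beyond the
     * node's max; ending exactly at max is still contained. */
    static int is_span_write(unsigned long last, unsigned long node_max)
    {
            return last > node_max;
    }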

In mas_wr_walk_descend():
  3728  static inline void mas_wr_walk_descend(struct ma_wr_state *wr_mas)
  3730  wr_mas->type = mte_node_type(wr_mas->mas->node);
  3731  mas_wr_node_walk(wr_mas);
  3732  wr_mas->slots = ma_slots(wr_mas->node, wr_mas->type);

In mas_wr_walk_traverse():
  3735  static inline void mas_wr_walk_traverse(struct ma_wr_state *wr_mas)
  3737  wr_mas->mas->max = wr_mas->r_max;
  3738  wr_mas->mas->min = wr_mas->r_min;
  3739  wr_mas->mas->node = wr_mas->content;
  3740  wr_mas->mas->offset = 0;
  3741  wr_mas->mas->depth++;

In mas_wr_walk():
  3751  static bool mas_wr_walk(struct ma_wr_state *wr_mas)
  3753  struct ma_state *mas = wr_mas->mas;
  3756  mas_wr_walk_descend(wr_mas);
  3757  if (unlikely(mas_is_span_wr(wr_mas)))
  3760  wr_mas->content = mas_slot_locked(mas, wr_mas->slots,
  3762  if (ma_is_leaf(wr_mas->type))
  3765  mas_wr_walk_traverse(wr_mas);
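
mas_wr_walk() strings the helpers above into the main descent loop: classify the node, find the slot, bail out to the spanning path if needed, and stop at a leaf. A compile-only skeleton of that shape (the stub functions stand in for the helpers; this is not the kernel code):

    #include <stdbool.h>

    void descend_one_level(void);   /* mas_wr_walk_descend() */
    bool write_spans_node(void);    /* mas_is_span_wr() */
    bool at_leaf(void);             /* ma_is_leaf(wr_mas->type) */
    void step_into_child(void);     /* mas_wr_walk_traverse() */

    /* Returns true when the write is contained in a single leaf, so the
     * in-node fast paths apply; false means take the spanning store. */
    static bool walk_to_leaf(void)
    {
            for (;;) {
                    descend_one_level();
                    if (write_spans_node())
                            return false;
                    if (at_leaf())
                            return true;
                    step_into_child();
            }
    }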

In mas_wr_walk_index():
  3771  static bool mas_wr_walk_index(struct ma_wr_state *wr_mas)
  3773  struct ma_state *mas = wr_mas->mas;
  3776  mas_wr_walk_descend(wr_mas);
  3777  wr_mas->content = mas_slot_locked(mas, wr_mas->slots,
  3779  if (ma_is_leaf(wr_mas->type))
  3781  mas_wr_walk_traverse(wr_mas);

In mas_wr_spanning_store():
  3952  static inline int mas_wr_spanning_store(struct ma_wr_state *wr_mas)
  3963  MA_WR_STATE(r_wr_mas, &r_mas, wr_mas->entry);
  3964  MA_WR_STATE(l_wr_mas, &l_mas, wr_mas->entry);
  3978  mas = wr_mas->mas;
  3982  return mas_new_root(mas, wr_mas->entry);
  4010  if (!wr_mas->entry) {
  4020  return mas_new_root(mas, wr_mas->entry);

In mas_wr_node_store():
  4051  static inline bool mas_wr_node_store(struct ma_wr_state *wr_mas)
  4053  struct ma_state *mas = wr_mas->mas;
  4057  unsigned char new_end = wr_mas->node_end;
  4059  unsigned char node_slots = mt_slots[wr_mas->type];
  4061  unsigned char copy_size, max_piv = mt_pivots[wr_mas->type];
  4065  if (mas->last == wr_mas->r_max) {
  4070  wr_mas->offset_end++;
  4071  } else if (mas->last < wr_mas->r_max) {
  4073  if (unlikely(wr_mas->r_max == ULONG_MAX))
  4074  mas_bulk_rebalance(mas, wr_mas->node_end, wr_mas->type);
  4078  if (wr_mas->end_piv == mas->last)
  4079  wr_mas->offset_end++;
  4081  new_end -= wr_mas->offset_end - offset - 1;
  4085  if (wr_mas->r_min < mas->index)
  4093  if (!mte_is_root(mas->node) && (new_end <= mt_min_slots[wr_mas->type]) &&
  4110  dst_pivots = ma_pivots(newnode, wr_mas->type);
  4111  dst_slots = ma_slots(newnode, wr_mas->type);
  4113  memcpy(dst_pivots, wr_mas->pivots, sizeof(unsigned long) * (offset + 1));
  4114  memcpy(dst_slots, wr_mas->slots, sizeof(void *) * (offset + 1));
  4118  if (wr_mas->r_min < mas->index) {
  4120  rcu_assign_pointer(dst_slots[dst_offset], wr_mas->content);
  4128  rcu_assign_pointer(dst_slots[dst_offset], wr_mas->entry);
  4134  if (wr_mas->offset_end > wr_mas->node_end || mas->last >= mas->max) {
  4141  copy_size = wr_mas->node_end - wr_mas->offset_end + 1;
  4142  memcpy(dst_slots + dst_offset, wr_mas->slots + wr_mas->offset_end,
  4149  wr_mas->pivots + wr_mas->offset_end,
  4153  if ((wr_mas->node_end == node_slots - 1) && (new_end < node_slots - 1))
  4159  mas->node = mt_mk_node(newnode, wr_mas->type);
  4162  memcpy(wr_mas->node, newnode, sizeof(struct maple_node));
  4164  trace_ma_write(__func__, mas, 0, wr_mas->entry);

In mas_wr_slot_store():
  4175  static inline bool mas_wr_slot_store(struct ma_wr_state *wr_mas)
  4177  struct ma_state *mas = wr_mas->mas;
  4181  if ((wr_mas->r_max > mas->last) && ((wr_mas->r_min != mas->index) ||
  4182  (offset != wr_mas->node_end)))
  4185  if (offset == wr_mas->node_end - 1)
  4188  lmax = wr_mas->pivots[offset + 1];
  4194  if (wr_mas->r_min == mas->index) {
  4200  rcu_assign_pointer(wr_mas->slots[offset], wr_mas->entry);
  4201  wr_mas->pivots[offset] = mas->last;
  4210  if ((offset + 1 < mt_pivots[wr_mas->type]) &&
  4211  (wr_mas->entry || wr_mas->pivots[offset + 1]))
  4212  wr_mas->pivots[offset + 1] = mas->last;
  4214  rcu_assign_pointer(wr_mas->slots[offset + 1], wr_mas->entry);
  4215  wr_mas->pivots[offset] = mas->index - 1;
  4219  trace_ma_write(__func__, mas, 0, wr_mas->entry);

In mas_wr_end_piv():
  4224  static inline void mas_wr_end_piv(struct ma_wr_state *wr_mas)
  4226  while ((wr_mas->mas->last > wr_mas->end_piv) &&
  4227  (wr_mas->offset_end < wr_mas->node_end))
  4228  wr_mas->end_piv = wr_mas->pivots[++wr_mas->offset_end];
  4230  if (wr_mas->mas->last > wr_mas->end_piv)
  4231  wr_mas->end_piv = wr_mas->mas->max;
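
mas_wr_end_piv() advances offset_end from the write's first slot until end_piv covers mas->last, so later code knows how many existing slots the write consumes. A standalone model of that scan (raw arrays replace the wr_mas fields; call with *offset_end at the starting slot and *end_piv initialised to r_max):

    /* Walk pivots forward until the pivot at *offset_end reaches last;
     * past the final pivot, the node max bounds the range. */
    static void find_end(const unsigned long *pivots, unsigned char node_end,
                         unsigned long node_max, unsigned long last,
                         unsigned char *offset_end, unsigned long *end_piv)
    {
            while (last > *end_piv && *offset_end < node_end)
                    *end_piv = pivots[++(*offset_end)];

            if (last > *end_piv)
                    *end_piv = node_max;
    }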

In mas_wr_extend_null():
  4234  static inline void mas_wr_extend_null(struct ma_wr_state *wr_mas)
  4236  struct ma_state *mas = wr_mas->mas;
  4238  if (mas->last < wr_mas->end_piv && !wr_mas->slots[wr_mas->offset_end])
  4239  mas->last = wr_mas->end_piv;
  4242  if ((mas->last == wr_mas->end_piv) &&
  4243  (wr_mas->node_end != wr_mas->offset_end) &&
  4244  !wr_mas->slots[wr_mas->offset_end + 1]) {
  4245  wr_mas->offset_end++;
  4246  if (wr_mas->offset_end == wr_mas->node_end)
  4249  mas->last = wr_mas->pivots[wr_mas->offset_end];
  4250  wr_mas->end_piv = mas->last;
  4253  if (!wr_mas->content) {
  4255  mas->index = wr_mas->r_min;
  4258  if (mas->index == wr_mas->r_min && mas->offset &&
  4259  !wr_mas->slots[mas->offset - 1]) {
  4261  wr_mas->r_min = mas->index =
  4262  mas_safe_min(mas, wr_mas->pivots, mas->offset);
  4263  wr_mas->r_max = wr_mas->pivots[mas->offset];
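
mas_wr_extend_null() widens a NULL store so it swallows any NULL ranges touching it on either side, keeping adjacent NULLs coalesced into a single slot. A worked example of the effect (ranges illustrative; derived by reasoning from the checks above, so treat the exact boundaries as a sketch):

    before:  [A: 1-10][NULL: 11-20][B: 21-40][NULL: 41-60]
    store NULL over [15, 40]
    after:   [A: 1-10][NULL: 11-60]
             (index pulled back to r_min = 11; last pushed forward to 60)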

In mas_wr_append():
  4268  static inline bool mas_wr_append(struct ma_wr_state *wr_mas)
  4270  unsigned char end = wr_mas->node_end;
  4272  struct ma_state *mas = wr_mas->mas;
  4273  unsigned char node_pivots = mt_pivots[wr_mas->type];
  4275  if ((mas->index != wr_mas->r_min) && (mas->last == wr_mas->r_max)) {
  4277  wr_mas->pivots[new_end] = wr_mas->pivots[end];
  4280  ma_set_meta(wr_mas->node, maple_leaf_64, 0, new_end);
  4282  rcu_assign_pointer(wr_mas->slots[new_end], wr_mas->entry);
  4284  wr_mas->pivots[end] = mas->index - 1;
  4289  if ((mas->index == wr_mas->r_min) && (mas->last < wr_mas->r_max)) {
  4291  wr_mas->pivots[new_end] = wr_mas->pivots[end];
  4293  rcu_assign_pointer(wr_mas->slots[new_end], wr_mas->content);
  4295  ma_set_meta(wr_mas->node, maple_leaf_64, 0, new_end);
  4297  wr_mas->pivots[end] = mas->last;
  4298  rcu_assign_pointer(wr_mas->slots[end], wr_mas->entry);
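
mas_wr_append() handles a write landing in the last slot of a leaf that still has room, so one new pivot/slot pair suffices. A userspace model of the two cases in the listing (the kernel orders the stores so concurrent RCU readers never see a gap; this model ignores that ordering):

    /* Case 1 (line 4275): the write ends at r_max but starts past r_min.
     * The old content keeps [r_min, index - 1]; the new entry is appended
     * as the last slot, covering [index, r_max]. */
    static void append_entry_last(unsigned long *pivots, void **slots,
                                  unsigned char end, unsigned long index,
                                  void *entry)
    {
            pivots[end + 1] = pivots[end];  /* new entry ends at old r_max */
            slots[end + 1] = entry;
            pivots[end] = index - 1;        /* old content re-terminated */
    }

    /* Case 2 (line 4289): the write starts at r_min but stops short of
     * r_max. The new entry takes [r_min, last]; the old content moves up
     * to cover [last + 1, r_max]. */
    static void append_content_last(unsigned long *pivots, void **slots,
                                    unsigned char end, unsigned long last,
                                    void *entry, void *content)
    {
            pivots[end + 1] = pivots[end];  /* old content ends at r_max */
            slots[end + 1] = content;
            pivots[end] = last;
            slots[end] = entry;
    }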

In mas_wr_bnode():
  4311  static void mas_wr_bnode(struct ma_wr_state *wr_mas)
  4315  trace_ma_write(__func__, wr_mas->mas, 0, wr_mas->entry);
  4317  mas_store_b_node(wr_mas, &b_node, wr_mas->offset_end);
  4318  mas_commit_b_node(wr_mas, &b_node, wr_mas->node_end);

In mas_wr_modify():
  4321  static inline void mas_wr_modify(struct ma_wr_state *wr_mas)
  4325  struct ma_state *mas = wr_mas->mas;
  4328  if (wr_mas->r_min == mas->index && wr_mas->r_max == mas->last) {
  4329  rcu_assign_pointer(wr_mas->slots[mas->offset], wr_mas->entry);
  4330  if (!!wr_mas->entry ^ !!wr_mas->content)
  4336  node_slots = mt_slots[wr_mas->type];
  4337  node_size = wr_mas->node_end - wr_mas->offset_end + mas->offset + 2;
  4345  if (wr_mas->entry && (wr_mas->node_end < node_slots - 1) &&
  4346  (mas->offset == wr_mas->node_end) && mas_wr_append(wr_mas)) {
  4347  if (!wr_mas->content || !wr_mas->entry)
  4352  if ((wr_mas->offset_end - mas->offset <= 1) && mas_wr_slot_store(wr_mas))
  4354  else if (mas_wr_node_store(wr_mas))
  4361  mas_wr_bnode(wr_mas);
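
mas_wr_modify() is the dispatcher: it tries the write strategies from cheapest to most expensive and reaches the big-node rebuild only when nothing simpler fits. The ladder implied by the listing, as a sketch (the predicate names are illustrative; in the real function each helper can still decline and drop through to mas_wr_bnode()):

    enum write_path { DIRECT_REPLACE, APPEND, SLOT_STORE, NODE_STORE, BIG_NODE };

    static enum write_path pick_path(int exact_fit, int fits_in_node,
                                     int appends_at_end, int touches_two_slots)
    {
            if (exact_fit)
                    return DIRECT_REPLACE;  /* [r_min, r_max] == [index, last] */
            if (!fits_in_node)
                    return BIG_NODE;        /* node would overflow: split path */
            if (appends_at_end)
                    return APPEND;          /* mas_wr_append() */
            if (touches_two_slots)
                    return SLOT_STORE;      /* mas_wr_slot_store() */
            return NODE_STORE;              /* mas_wr_node_store() */
    }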

In mas_wr_store_entry():
  4371  static inline void *mas_wr_store_entry(struct ma_wr_state *wr_mas)
  4373  struct ma_state *mas = wr_mas->mas;
  4375  wr_mas->content = mas_start(mas);
  4377  mas_store_root(mas, wr_mas->entry);
  4378  return wr_mas->content;
  4381  if (unlikely(!mas_wr_walk(wr_mas))) {
  4382  mas_wr_spanning_store(wr_mas);
  4383  return wr_mas->content;
  4387  wr_mas->end_piv = wr_mas->r_max;
  4388  mas_wr_end_piv(wr_mas);
  4390  if (!wr_mas->entry)
  4391  mas_wr_extend_null(wr_mas);
  4395  mas_new_root(mas, wr_mas->entry);
  4396  return wr_mas->content;
  4399  mas_wr_modify(wr_mas);
  4400  return wr_mas->content;

In mas_insert():
  4413  MA_WR_STATE(wr_mas, mas, entry);
  4429  wr_mas.content = mas_start(mas);
  4430  if (wr_mas.content)
  4439  if (!mas_wr_walk(&wr_mas))
  4443  wr_mas.offset_end = mas->offset;
  4444  wr_mas.end_piv = wr_mas.r_max;
  4446  if (wr_mas.content || (mas->last > wr_mas.r_max))
  4452  mas_wr_modify(&wr_mas);
  4453  return wr_mas.content;
  4457  return wr_mas.content;
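
mas_insert() drives the same walk but refuses to overwrite: existing content in the range (line 4446) makes it fail instead of storing. External callers reach it through wrappers; a hedged usage sketch via mtree_insert_range(), which fails with -EEXIST on an occupied range:

    DEFINE_MTREE(tree);

    /* entry: any non-NULL pointer the caller wants stored over the
     * (currently empty) range [100, 199]. */
    int ret = mtree_insert_range(&tree, 100, 199, entry, GFP_KERNEL);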

In mas_fill_gap():
  5159  MA_WR_STATE(wr_mas, mas, entry);
  5185  mas_wr_store_entry(&wr_mas);

In mas_wr_store_setup():
  5604  static void mas_wr_store_setup(struct ma_wr_state *wr_mas)
  5606  if (!mas_is_start(wr_mas->mas)) {
  5607  if (mas_is_none(wr_mas->mas)) {
  5608  mas_reset(wr_mas->mas);
  5610  wr_mas->r_max = wr_mas->mas->max;
  5611  wr_mas->type = mte_node_type(wr_mas->mas->node);
  5612  if (mas_is_span_wr(wr_mas))
  5613  mas_reset(wr_mas->mas);

In mas_store():
  5634  MA_WR_STATE(wr_mas, mas, entry);
  5654  mas_wr_store_setup(&wr_mas);
  5655  mas_wr_store_entry(&wr_mas);
  5656  return wr_mas.content;

In mas_store_gfp():
  5671  MA_WR_STATE(wr_mas, mas, entry);
  5673  mas_wr_store_setup(&wr_mas);
  5676  mas_wr_store_entry(&wr_mas);
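
mas_store_gfp() wraps the setup/store pair above and retries failed allocations with the supplied gfp mask. A typical usage sketch under the advanced API (the caller holds the tree lock):

    DEFINE_MTREE(tree);
    MA_STATE(mas, &tree, 10, 20);

    /* Store entry over [10, 20]; returns 0 on success or a negative
     * errno such as -ENOMEM. */
    mas_lock(&mas);
    int ret = mas_store_gfp(&mas, entry, GFP_KERNEL);
    mas_unlock(&mas);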

In mas_store_prealloc():
  5695  MA_WR_STATE(wr_mas, mas, entry);
  5697  mas_wr_store_setup(&wr_mas);
  5699  mas_wr_store_entry(&wr_mas);

In mas_erase():
  6079  MA_WR_STATE(wr_mas, mas, NULL);
  6092  mas_wr_store_setup(&wr_mas);
  6093  mas_wr_store_entry(&wr_mas);
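
mas_erase() is a NULL store driven through the same machinery: note the MA_WR_STATE initialised with a NULL entry at line 6079. A usage sketch (advanced API; tree and index stand for the caller's own variables):

    MA_STATE(mas, &tree, index, index);
    void *removed;

    mas_lock(&mas);
    removed = mas_erase(&mas);  /* stores NULL, returns the old entry */
    mas_unlock(&mas);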

In mtree_store_range():
  6191  MA_WR_STATE(wr_mas, &mas, entry);
  6202  mas_wr_store_entry(&wr_mas);
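
mtree_store_range() is the plain (non-advanced) API over the same write path and, unlike the mas_* calls above, takes the tree lock itself. Sketch:

    DEFINE_MTREE(tree);

    /* Store entry over [10, 20]; no external locking needed. */
    int ret = mtree_store_range(&tree, 10, 20, entry, GFP_KERNEL);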