maple_tree: add a fast path case in mas_wr_slot_store()

When expanding a range in two directions, only partially overwriting the
previous and next ranges, the number of entries will not be increased, so
we can just update the pivots as a fast path. However, it may introduce
potential risks in RCU mode, because it updates two pivots. We only
enable it in non-RCU mode.

Link: https://lkml.kernel.org/r/20230628073657.75314-5-zhangpeng.00@bytedance.com
Signed-off-by: Peng Zhang <zhangpeng.00@bytedance.com>
Reviewed-by: Liam R. Howlett <Liam.Howlett@oracle.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Commit authored by Peng Zhang on 2023-06-28 15:36:57 +08:00; committed by Andrew Morton.
parent 23e9dde0b2
commit 64891ba3e5

View file

@@ -4168,23 +4168,35 @@ static inline bool mas_wr_slot_store(struct ma_wr_state *wr_mas)
 {
 	struct ma_state *mas = wr_mas->mas;
 	unsigned char offset = mas->offset;
+	void __rcu **slots = wr_mas->slots;
 	bool gap = false;
 
-	if (wr_mas->offset_end - offset != 1)
-		return false;
-
-	gap |= !mt_slot_locked(mas->tree, wr_mas->slots, offset);
-	gap |= !mt_slot_locked(mas->tree, wr_mas->slots, offset + 1);
-
-	if (mas->index == wr_mas->r_min) {
-		/* Overwriting the range and over a part of the next range. */
-		rcu_assign_pointer(wr_mas->slots[offset], wr_mas->entry);
-		wr_mas->pivots[offset] = mas->last;
-	} else {
-		/* Overwriting a part of the range and over the next range */
-		rcu_assign_pointer(wr_mas->slots[offset + 1], wr_mas->entry);
+	gap |= !mt_slot_locked(mas->tree, slots, offset);
+	gap |= !mt_slot_locked(mas->tree, slots, offset + 1);
+
+	if (wr_mas->offset_end - offset == 1) {
+		if (mas->index == wr_mas->r_min) {
+			/* Overwriting the range and a part of the next one */
+			rcu_assign_pointer(slots[offset], wr_mas->entry);
+			wr_mas->pivots[offset] = mas->last;
+		} else {
+			/* Overwriting a part of the range and the next one */
+			rcu_assign_pointer(slots[offset + 1], wr_mas->entry);
+			wr_mas->pivots[offset] = mas->index - 1;
+			mas->offset++; /* Keep mas accurate. */
+		}
+	} else if (!mt_in_rcu(mas->tree)) {
+		/*
+		 * Expand the range, only partially overwriting the previous and
+		 * next ranges
+		 */
+		gap |= !mt_slot_locked(mas->tree, slots, offset + 2);
+		rcu_assign_pointer(slots[offset + 1], wr_mas->entry);
 		wr_mas->pivots[offset] = mas->index - 1;
+		wr_mas->pivots[offset + 1] = mas->last;
 		mas->offset++; /* Keep mas accurate. */
+	} else {
+		return false;
+	}
 	}
 
 	trace_ma_write(__func__, mas, 0, wr_mas->entry);