maple_tree: move debug check to __mas_set_range()
__mas_set_range() was created to shortcut resetting the maple state and a debug check was added to the caller (the vma iterator) to ensure the internal maple state remains safe to use. Move the debug check from the vma iterator into the maple tree itself so other users do not incorrectly use the advanced maple state modification. Fallout from this change includes a large amount of debug setup that needed to be moved earlier in the header, and the maple_tree.h radix-tree test code needed to move the inclusion of the header to after the atomic define. None of those changes are functional changes. Link: https://lkml.kernel.org/r/20231101171629.3612299-4-Liam.Howlett@oracle.com Signed-off-by: Liam R. Howlett <Liam.Howlett@oracle.com> Cc: Peng Zhang <zhangpeng.00@bytedance.com> Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
This commit is contained in:
parent
f7a5901895
commit
bf857ddd21
|
@ -557,162 +557,6 @@ static inline void mas_reset(struct ma_state *mas)
|
|||
*/
|
||||
/**
 * mas_for_each() - Iterate over a range of the maple tree.
 * @__mas: Maple Tree operation state.
 * @__entry: Receives the current entry on each iteration.
 * @__max: Upper limit of the iteration.
 *
 * The loop terminates when mas_find() returns NULL.
 */
#define mas_for_each(__mas, __entry, __max) \
	while (((__entry) = mas_find((__mas), (__max))) != NULL)
|
||||
/**
 * __mas_set_range() - Set up Maple Tree operation state to a sub-range of the
 * current location.
 * @mas: Maple Tree operation state.
 * @start: New start of range in the Maple Tree.
 * @last: New end of range in the Maple Tree.
 *
 * set the internal maple state values to a sub-range.
 * Please use mas_set_range() if you do not know where you are in the tree.
 */
static inline void __mas_set_range(struct ma_state *mas, unsigned long start,
		unsigned long last)
{
	/* Advanced API: mas->node is deliberately left untouched, so the walk
	 * state stays wherever the caller last positioned it. */
	mas->index = start;
	mas->last = last;
}
|
||||
|
||||
/**
 * mas_set_range() - Set up Maple Tree operation state for a different index.
 * @mas: Maple Tree operation state.
 * @start: New start of range in the Maple Tree.
 * @last: New end of range in the Maple Tree.
 *
 * Move the operation state to refer to a different range. This will
 * have the effect of starting a walk from the top; see mas_next()
 * to move to an adjacent index.
 */
static inline
void mas_set_range(struct ma_state *mas, unsigned long start, unsigned long last)
{
	__mas_set_range(mas, start, last);
	/* Force the next operation to re-walk from the root. */
	mas->node = MAS_START;
}
|
||||
|
||||
/**
 * mas_set() - Set up Maple Tree operation state for a different index.
 * @mas: Maple Tree operation state.
 * @index: New index into the Maple Tree.
 *
 * Move the operation state to refer to a different index. This will
 * have the effect of starting a walk from the top; see mas_next()
 * to move to an adjacent index.
 */
static inline void mas_set(struct ma_state *mas, unsigned long index)
{
	/* A single index is a range of size one. */
	mas_set_range(mas, index, index);
}
|
||||
|
||||
static inline bool mt_external_lock(const struct maple_tree *mt)
|
||||
{
|
||||
return (mt->ma_flags & MT_FLAGS_LOCK_MASK) == MT_FLAGS_LOCK_EXTERN;
|
||||
}
|
||||
|
||||
/**
 * mt_init_flags() - Initialise an empty maple tree with flags.
 * @mt: Maple Tree
 * @flags: maple tree flags.
 *
 * If you need to initialise a Maple Tree with special flags (eg, an
 * allocation tree), use this function.
 *
 * Context: Any context.
 */
static inline void mt_init_flags(struct maple_tree *mt, unsigned int flags)
{
	mt->ma_flags = flags;
	/* The internal spinlock is only set up when no external lock is used. */
	if (!mt_external_lock(mt))
		spin_lock_init(&mt->ma_lock);
	rcu_assign_pointer(mt->ma_root, NULL);
}
|
||||
|
||||
/**
 * mt_init() - Initialise an empty maple tree.
 * @mt: Maple Tree
 *
 * An empty Maple Tree.
 *
 * Context: Any context.
 */
static inline void mt_init(struct maple_tree *mt)
{
	/* Default initialisation: no special flags, internal locking. */
	mt_init_flags(mt, 0);
}
|
||||
|
||||
/*
 * mt_in_rcu() - Report whether the tree operates in RCU mode.
 * @mt: The Maple Tree
 *
 * CONFIG_MAPLE_RCU_DISABLED force-disables RCU mode at compile time,
 * regardless of the MT_FLAGS_USE_RCU flag.
 */
static inline bool mt_in_rcu(struct maple_tree *mt)
{
#ifdef CONFIG_MAPLE_RCU_DISABLED
	return false;
#endif
	return mt->ma_flags & MT_FLAGS_USE_RCU;
}
|
||||
|
||||
/**
|
||||
* mt_clear_in_rcu() - Switch the tree to non-RCU mode.
|
||||
* @mt: The Maple Tree
|
||||
*/
|
||||
static inline void mt_clear_in_rcu(struct maple_tree *mt)
|
||||
{
|
||||
if (!mt_in_rcu(mt))
|
||||
return;
|
||||
|
||||
if (mt_external_lock(mt)) {
|
||||
WARN_ON(!mt_lock_is_held(mt));
|
||||
mt->ma_flags &= ~MT_FLAGS_USE_RCU;
|
||||
} else {
|
||||
mtree_lock(mt);
|
||||
mt->ma_flags &= ~MT_FLAGS_USE_RCU;
|
||||
mtree_unlock(mt);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* mt_set_in_rcu() - Switch the tree to RCU safe mode.
|
||||
* @mt: The Maple Tree
|
||||
*/
|
||||
static inline void mt_set_in_rcu(struct maple_tree *mt)
|
||||
{
|
||||
if (mt_in_rcu(mt))
|
||||
return;
|
||||
|
||||
if (mt_external_lock(mt)) {
|
||||
WARN_ON(!mt_lock_is_held(mt));
|
||||
mt->ma_flags |= MT_FLAGS_USE_RCU;
|
||||
} else {
|
||||
mtree_lock(mt);
|
||||
mt->ma_flags |= MT_FLAGS_USE_RCU;
|
||||
mtree_unlock(mt);
|
||||
}
|
||||
}
|
||||
|
||||
/* Decode the tree height packed into the flags word. */
static inline unsigned int mt_height(const struct maple_tree *mt)
{
	return (mt->ma_flags & MT_FLAGS_HEIGHT_MASK) >> MT_FLAGS_HEIGHT_OFFSET;
}
|
||||
|
||||
void *mt_find(struct maple_tree *mt, unsigned long *index, unsigned long max);
|
||||
void *mt_find_after(struct maple_tree *mt, unsigned long *index,
|
||||
unsigned long max);
|
||||
void *mt_prev(struct maple_tree *mt, unsigned long index, unsigned long min);
|
||||
void *mt_next(struct maple_tree *mt, unsigned long index, unsigned long max);
|
||||
|
||||
/**
 * mt_for_each - Iterate over each entry starting at index until max.
 * @__tree: The Maple Tree
 * @__entry: The current entry
 * @__index: The index to start the search from. Subsequently used as iterator.
 * @__max: The maximum limit for @index
 *
 * This iterator skips all entries, which resolve to a NULL pointer,
 * e.g. entries which have been reserved with XA_ZERO_ENTRY.
 */
#define mt_for_each(__tree, __entry, __index, __max) \
	for (__entry = mt_find(__tree, &(__index), __max); \
		__entry; __entry = mt_find_after(__tree, &(__index), __max))
|
||||
|
||||
|
||||
#ifdef CONFIG_DEBUG_MAPLE_TREE
|
||||
enum mt_dump_format {
|
||||
|
@ -838,4 +682,163 @@ void mt_cache_shrink(void);
|
|||
#define MAS_WR_WARN_ON(__mas, __x) WARN_ON(__x)
|
||||
#endif /* CONFIG_DEBUG_MAPLE_TREE */
|
||||
|
||||
/**
 * __mas_set_range() - Set up Maple Tree operation state to a sub-range of the
 * current location.
 * @mas: Maple Tree operation state.
 * @start: New start of range in the Maple Tree.
 * @last: New end of range in the Maple Tree.
 *
 * set the internal maple state values to a sub-range.
 * Please use mas_set_range() if you do not know where you are in the tree.
 */
static inline void __mas_set_range(struct ma_state *mas, unsigned long start,
		unsigned long last)
{
	/* Ensure the range starts within the current slot */
	MAS_WARN_ON(mas, mas_is_active(mas) &&
		    (mas->index > start || mas->last < start));
	/* Only the new start is validated; the new last may lie outside the
	 * current slot.  mas->node is deliberately left untouched. */
	mas->index = start;
	mas->last = last;
}
|
||||
|
||||
/**
 * mas_set_range() - Set up Maple Tree operation state for a different index.
 * @mas: Maple Tree operation state.
 * @start: New start of range in the Maple Tree.
 * @last: New end of range in the Maple Tree.
 *
 * Move the operation state to refer to a different range. This will
 * have the effect of starting a walk from the top; see mas_next()
 * to move to an adjacent index.
 */
static inline
void mas_set_range(struct ma_state *mas, unsigned long start, unsigned long last)
{
	/* Reset to MAS_START before setting the range; presumably this keeps
	 * __mas_set_range()'s active-state debug check from firing on a full
	 * reset — confirm against mas_is_active(). */
	mas->node = MAS_START;
	__mas_set_range(mas, start, last);
}
|
||||
|
||||
/**
 * mas_set() - Set up Maple Tree operation state for a different index.
 * @mas: Maple Tree operation state.
 * @index: New index into the Maple Tree.
 *
 * Move the operation state to refer to a different index. This will
 * have the effect of starting a walk from the top; see mas_next()
 * to move to an adjacent index.
 */
static inline void mas_set(struct ma_state *mas, unsigned long index)
{
	/* A single index is a range of size one. */
	mas_set_range(mas, index, index);
}
|
||||
|
||||
static inline bool mt_external_lock(const struct maple_tree *mt)
|
||||
{
|
||||
return (mt->ma_flags & MT_FLAGS_LOCK_MASK) == MT_FLAGS_LOCK_EXTERN;
|
||||
}
|
||||
|
||||
/**
 * mt_init_flags() - Initialise an empty maple tree with flags.
 * @mt: Maple Tree
 * @flags: maple tree flags.
 *
 * If you need to initialise a Maple Tree with special flags (eg, an
 * allocation tree), use this function.
 *
 * Context: Any context.
 */
static inline void mt_init_flags(struct maple_tree *mt, unsigned int flags)
{
	mt->ma_flags = flags;
	/* The internal spinlock is only set up when no external lock is used. */
	if (!mt_external_lock(mt))
		spin_lock_init(&mt->ma_lock);
	rcu_assign_pointer(mt->ma_root, NULL);
}
|
||||
|
||||
/**
 * mt_init() - Initialise an empty maple tree.
 * @mt: Maple Tree
 *
 * An empty Maple Tree.
 *
 * Context: Any context.
 */
static inline void mt_init(struct maple_tree *mt)
{
	/* Default initialisation: no special flags, internal locking. */
	mt_init_flags(mt, 0);
}
|
||||
|
||||
/*
 * mt_in_rcu() - Report whether the tree operates in RCU mode.
 * @mt: The Maple Tree
 *
 * CONFIG_MAPLE_RCU_DISABLED force-disables RCU mode at compile time,
 * regardless of the MT_FLAGS_USE_RCU flag.
 */
static inline bool mt_in_rcu(struct maple_tree *mt)
{
#ifdef CONFIG_MAPLE_RCU_DISABLED
	return false;
#endif
	return mt->ma_flags & MT_FLAGS_USE_RCU;
}
|
||||
|
||||
/**
|
||||
* mt_clear_in_rcu() - Switch the tree to non-RCU mode.
|
||||
* @mt: The Maple Tree
|
||||
*/
|
||||
static inline void mt_clear_in_rcu(struct maple_tree *mt)
|
||||
{
|
||||
if (!mt_in_rcu(mt))
|
||||
return;
|
||||
|
||||
if (mt_external_lock(mt)) {
|
||||
WARN_ON(!mt_lock_is_held(mt));
|
||||
mt->ma_flags &= ~MT_FLAGS_USE_RCU;
|
||||
} else {
|
||||
mtree_lock(mt);
|
||||
mt->ma_flags &= ~MT_FLAGS_USE_RCU;
|
||||
mtree_unlock(mt);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* mt_set_in_rcu() - Switch the tree to RCU safe mode.
|
||||
* @mt: The Maple Tree
|
||||
*/
|
||||
static inline void mt_set_in_rcu(struct maple_tree *mt)
|
||||
{
|
||||
if (mt_in_rcu(mt))
|
||||
return;
|
||||
|
||||
if (mt_external_lock(mt)) {
|
||||
WARN_ON(!mt_lock_is_held(mt));
|
||||
mt->ma_flags |= MT_FLAGS_USE_RCU;
|
||||
} else {
|
||||
mtree_lock(mt);
|
||||
mt->ma_flags |= MT_FLAGS_USE_RCU;
|
||||
mtree_unlock(mt);
|
||||
}
|
||||
}
|
||||
|
||||
/* Decode the tree height packed into the flags word. */
static inline unsigned int mt_height(const struct maple_tree *mt)
{
	return (mt->ma_flags & MT_FLAGS_HEIGHT_MASK) >> MT_FLAGS_HEIGHT_OFFSET;
}
|
||||
|
||||
void *mt_find(struct maple_tree *mt, unsigned long *index, unsigned long max);
|
||||
void *mt_find_after(struct maple_tree *mt, unsigned long *index,
|
||||
unsigned long max);
|
||||
void *mt_prev(struct maple_tree *mt, unsigned long index, unsigned long min);
|
||||
void *mt_next(struct maple_tree *mt, unsigned long index, unsigned long max);
|
||||
|
||||
/**
 * mt_for_each - Iterate over each entry starting at index until max.
 * @__tree: The Maple Tree
 * @__entry: The current entry
 * @__index: The index to start the search from. Subsequently used as iterator.
 * @__max: The maximum limit for @index
 *
 * This iterator skips all entries, which resolve to a NULL pointer,
 * e.g. entries which have been reserved with XA_ZERO_ENTRY.
 */
#define mt_for_each(__tree, __entry, __index, __max) \
	for (__entry = mt_find(__tree, &(__index), __max); \
		__entry; __entry = mt_find_after(__tree, &(__index), __max))
|
||||
|
||||
#endif /*_LINUX_MAPLE_TREE_H */
|
||||
|
|
|
@ -1135,8 +1135,6 @@ static inline bool vma_soft_dirty_enabled(struct vm_area_struct *vma)
|
|||
/* Position the VMA iterator over a new range without re-walking the tree.
 * The debug check requires the iterator to be either freshly reset
 * (MAS_START) or already positioned over a slot containing @index, since
 * __mas_set_range() leaves the internal walk state untouched.
 * NOTE(review): @index/@last appear to be a half-open range; last - 1
 * converts to the maple tree's inclusive end — confirm against callers. */
static inline void vma_iter_config(struct vma_iterator *vmi,
		unsigned long index, unsigned long last)
{
	MAS_BUG_ON(&vmi->mas, vmi->mas.node != MAS_START &&
			(vmi->mas.index > index || vmi->mas.last < index));
	__mas_set_range(&vmi->mas, index, last - 1);
}
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* SPDX-License-Identifier: GPL-2.0+ */
|
||||
#define atomic_t int32_t
|
||||
#include "../../../../include/linux/maple_tree.h"
|
||||
#define atomic_inc(x) uatomic_inc(x)
|
||||
#define atomic_read(x) uatomic_read(x)
|
||||
#define atomic_set(x, y) do {} while (0)
|
||||
#define U8_MAX UCHAR_MAX
|
||||
#include "../../../../include/linux/maple_tree.h"
|
||||
|
|
Loading…
Reference in New Issue