#include <linux/maple_tree.h>
#include <linux/module.h>
#include <linux/rwsem.h>
#define MTREE_ALLOC_MAX …
#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT …
#ifndef CONFIG_DEBUG_MAPLE_TREE
#define mt_dump …
#define mt_validate …
#define mt_cache_shrink …
#define mas_dump …
#define mas_wr_dump …
atomic_t maple_tree_tests_run;
atomic_t maple_tree_tests_passed;
#undef MT_BUG_ON
#define MT_BUG_ON …
#endif
#ifdef __KERNEL__
#define mt_set_non_kernel(x) …
#define mt_zero_nr_tallocated(x) …
#else
#define cond_resched …
#endif
#define mas_is_none(x) …
#define mas_is_overflow(x) …
#define mas_is_underflow(x) …
static int __init mtree_insert_index(struct maple_tree *mt,
unsigned long index, gfp_t gfp)
{ … }
static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
{ … }
static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
void *ptr)
{ … }
static int __init mtree_test_store_range(struct maple_tree *mt,
unsigned long start, unsigned long end, void *ptr)
{ … }
static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
void *ptr)
{ … }
static int __init mtree_test_insert_range(struct maple_tree *mt,
unsigned long start, unsigned long end, void *ptr)
{ … }
static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
{ … }
static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{ … }
#if defined(CONFIG_64BIT)
static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
unsigned long start, unsigned long end, unsigned long size,
unsigned long expected, int eret, void *ptr)
{ … }
static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
unsigned long start, unsigned long end, unsigned long size,
unsigned long expected, int eret, void *ptr)
{ … }
#endif
static noinline void __init check_load(struct maple_tree *mt,
unsigned long index, void *ptr)
{ … }
static noinline void __init check_store_range(struct maple_tree *mt,
unsigned long start, unsigned long end, void *ptr, int expected)
{ … }
static noinline void __init check_insert_range(struct maple_tree *mt,
unsigned long start, unsigned long end, void *ptr, int expected)
{ … }
static noinline void __init check_insert(struct maple_tree *mt,
unsigned long index, void *ptr)
{ … }
static noinline void __init check_dup_insert(struct maple_tree *mt,
unsigned long index, void *ptr)
{ … }
static noinline void __init check_index_load(struct maple_tree *mt,
unsigned long index)
{ … }
static inline __init int not_empty(struct maple_node *node)
{ … }
static noinline void __init check_rev_seq(struct maple_tree *mt,
unsigned long max, bool verbose)
{ … }
static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
bool verbose)
{ … }
static noinline void __init check_lb_not_empty(struct maple_tree *mt)
{ … }
static noinline void __init check_lower_bound_split(struct maple_tree *mt)
{ … }
static noinline void __init check_upper_bound_split(struct maple_tree *mt)
{ … }
static noinline void __init check_mid_split(struct maple_tree *mt)
{ … }
static noinline void __init check_rev_find(struct maple_tree *mt)
{ … }
static noinline void __init check_find(struct maple_tree *mt)
{ … }
static noinline void __init check_find_2(struct maple_tree *mt)
{ … }
#if defined(CONFIG_64BIT)
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{ … }
static noinline void __init check_alloc_range(struct maple_tree *mt)
{ … }
#endif
static noinline void __init check_ranges(struct maple_tree *mt)
{ … }
static noinline void __init check_next_entry(struct maple_tree *mt)
{ … }
static noinline void __init check_prev_entry(struct maple_tree *mt)
{ … }
static noinline void __init check_root_expand(struct maple_tree *mt)
{ … }
static noinline void __init check_gap_combining(struct maple_tree *mt)
{ … }
static noinline void __init check_node_overwrite(struct maple_tree *mt)
{ … }
#if defined(BENCH_SLOT_STORE)
/* Benchmark repeated erase + re-store of a single fixed range. */
static noinline void __init bench_slot_store(struct maple_tree *mt)
{
	const int brk = 105, brk_start = 100;
	const int max = 1040, count = 20000000;
	int idx, iter;

	/* Fill the tree: a 6-slot range every 10 indices. */
	for (idx = 0; idx < max; idx += 10)
		mtree_store_range(mt, idx, idx + 5, xa_mk_value(idx),
				  GFP_KERNEL);

	/* Hammer one slot: clear it, then store the range back. */
	for (iter = 0; iter < count; iter++) {
		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
				  GFP_KERNEL);
	}
}
#endif
#if defined(BENCH_NODE_STORE)
/* Benchmark overwriting stores that cycle across a window of indices. */
static noinline void __init bench_node_store(struct maple_tree *mt)
{
	const int max = 240, count = 20000000;
	int pos = 76;
	int i;

	/* Fill the tree: a 6-slot range every 10 indices. */
	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	/* Overwrite a sliding 16-slot window, wrapping at 135 back to 76. */
	for (i = 0; i < count; i++) {
		mtree_store_range(mt, pos, pos + 15, xa_mk_value(pos),
				  GFP_KERNEL);
		pos += 5;
		if (pos >= 135)
			pos = 76;
	}
}
#endif
#if defined(BENCH_AWALK)
/* Benchmark reverse empty-area searches against a tree with one gap. */
static noinline void __init bench_awalk(struct maple_tree *mt)
{
	const int max = 2500, count = 50000000;
	int i;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	/* Punch a hole at 1470-1475 so the search has a gap to locate. */
	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_empty_area_rev(&mas, 0, 2000, 10);
		mas_reset(&mas);
	}
}
#endif
#if defined(BENCH_WALK)
/* Benchmark mas_walk() to a fixed index, resetting the state each pass. */
static noinline void __init bench_walk(struct maple_tree *mt)
{
	const int max = 2500, count = 550000000;
	int i;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_walk(&mas);
		mas_reset(&mas);	/* force a full re-walk next time */
	}
}
#endif
#if defined(BENCH_LOAD)
/* Benchmark repeated mtree_load() of one fixed index. */
static noinline void __init bench_load(struct maple_tree *mt)
{
	const int max = 2500, count = 550000000;
	int i;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++)
		mtree_load(mt, 1470);
}
#endif
#if defined(BENCH_MT_FOR_EACH)
/* Benchmark full-tree iteration via mt_for_each(), checking each value. */
static noinline void __init bench_mt_for_each(struct maple_tree *mt)
{
	const int count = 1000000;
	unsigned long max = 2500, index = 0;
	void *entry;
	int iter;

	/* Fill the tree: a 5-slot range every 5 indices. */
	for (iter = 0; iter < max; iter += 5)
		mtree_store_range(mt, iter, iter + 4, xa_mk_value(iter),
				  GFP_KERNEL);

	for (iter = 0; iter < count; iter++) {
		unsigned long expect = 0;

		mt_for_each(mt, entry, index, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(expect));
			expect += 5;
		}
		index = 0;	/* restart the iteration for the next pass */
	}
}
#endif
#if defined(BENCH_MAS_FOR_EACH)
/* Benchmark mas_for_each() iteration under rcu_read_lock(). */
static noinline void __init bench_mas_for_each(struct maple_tree *mt)
{
	const int count = 1000000;
	unsigned long max = 2500;
	void *entry;
	int i;
	MA_STATE(mas, mt, 0, 0);

	/* Entries every 5 indices; every sixth range is one slot shorter. */
	for (i = 0; i < max; i += 5) {
		int gap = (i % 30 == 0) ? 3 : 4;

		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
	}

	rcu_read_lock();
	for (i = 0; i < count; i++) {
		unsigned long expect = 0;

		mas_for_each(&mas, entry, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(expect));
			expect += 5;
		}
		mas_set(&mas, 0);	/* rewind for the next pass */
	}
	rcu_read_unlock();
}
#endif
#if defined(BENCH_MAS_PREV)
/* Benchmark reverse iteration with mas_prev() under rcu_read_lock(). */
static noinline void __init bench_mas_prev(struct maple_tree *mt)
{
	const int count = 1000000;
	unsigned long max = 2500;
	void *entry;
	int i;
	MA_STATE(mas, mt, 0, 0);

	/* Entries every 5 indices; every sixth range is one slot shorter. */
	for (i = 0; i < max; i += 5) {
		int gap = (i % 30 == 0) ? 3 : 4;

		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
	}

	rcu_read_lock();
	for (i = 0; i < count; i++) {
		unsigned long expect = 2495;	/* highest stored start index */

		mas_set(&mas, ULONG_MAX);
		entry = mas_prev(&mas, 0);
		while (entry != NULL) {
			MT_BUG_ON(mt, entry != xa_mk_value(expect));
			expect -= 5;
			entry = mas_prev(&mas, 0);
		}
	}
	rcu_read_unlock();
}
#endif
static noinline void __init check_forking(void)
{ … }
static noinline void __init check_iteration(struct maple_tree *mt)
{ … }
static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
{ … }
#if defined(BENCH_FORK)
/*
 * Benchmark tree duplication: build a source tree once, then repeatedly
 * copy it with __mt_dup(), overwrite every duplicated entry in place,
 * validate the copy, and destroy it.  Both trees use external rwsem
 * locking (MT_FLAGS_LOCK_EXTERN).
 */
static noinline void __init bench_forking(void)
{
struct maple_tree mt, newmt;
int i, nr_entries = 134, nr_fork = 80000, ret;
void *val;
MA_STATE(mas, &mt, 0, 0);
MA_STATE(newmas, &newmt, 0, 0);
struct rw_semaphore mt_lock, newmt_lock;
init_rwsem(&mt_lock);
init_rwsem(&newmt_lock);
mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
mt_set_external_lock(&mt, &mt_lock);
/* Populate the source tree with ranges [10i, 10i + 5] under its lock. */
down_write(&mt_lock);
for (i = 0; i <= nr_entries; i++) {
mas_set_range(&mas, i*10, i*10 + 5);
mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
}
for (i = 0; i < nr_fork; i++) {
mt_init_flags(&newmt,
MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
mt_set_external_lock(&newmt, &newmt_lock);
/* mt_lock is still held, so take newmt_lock with nesting annotation. */
down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
if (ret) {
pr_err("OOM!");
BUG_ON(1);
}
/* Walk the duplicate and store each value back over itself. */
mas_set(&newmas, 0);
mas_for_each(&newmas, val, ULONG_MAX)
mas_store(&newmas, val);
mas_destroy(&newmas);
mt_validate(&newmt);
__mt_destroy(&newmt);
up_write(&newmt_lock);
}
/* Tear down the source tree before releasing its lock. */
mas_destroy(&mas);
__mt_destroy(&mt);
up_write(&mt_lock);
}
#endif
static noinline void __init next_prev_test(struct maple_tree *mt)
{ … }
static noinline void __init check_spanning_relatives(struct maple_tree *mt)
{ … }
static noinline void __init check_fuzzer(struct maple_tree *mt)
{ … }
static noinline void __init check_dup_gaps(struct maple_tree *mt,
unsigned long nr_entries, bool zero_start,
unsigned long gap)
{ … }
static noinline void __init check_dup(struct maple_tree *mt)
{ … }
static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
{ … }
static noinline void __init check_empty_area_window(struct maple_tree *mt)
{ … }
static noinline void __init check_empty_area_fill(struct maple_tree *mt)
{ … }
static noinline void __init check_state_handling(struct maple_tree *mt)
{ … }
static noinline void __init alloc_cyclic_testing(struct maple_tree *mt)
{ … }
static DEFINE_MTREE(tree);
static int __init maple_tree_seed(void)
{ … }
static void __exit maple_tree_harvest(void)
{ … }
module_init(…) …;
module_exit(maple_tree_harvest);
MODULE_AUTHOR(…) …;
MODULE_DESCRIPTION(…) …;
MODULE_LICENSE(…) …;