// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <[email protected]>
 *
 * Any tests that only require the interface of the tree.
 */

#include <linux/maple_tree.h>
#include <linux/module.h>
#include <linux/rwsem.h>

#define MTREE_ALLOC_MAX 0x2000000000000Ul
#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

#ifndef CONFIG_DEBUG_MAPLE_TREE
#define mt_dump(mt, fmt)		do {} while (0)
#define mt_validate(mt)			do {} while (0)
#define mt_cache_shrink()		do {} while (0)
#define mas_dump(mas)			do {} while (0)
#define mas_wr_dump(mas)		do {} while (0)
atomic_t maple_tree_tests_run;
atomic_t maple_tree_tests_passed;
#undef MT_BUG_ON

#define MT_BUG_ON(__tree, __x) do {					\
	atomic_inc(&maple_tree_tests_run);				\
	if (__x) {							\
		pr_info("BUG at %s:%d (%u)\n",				\
			__func__, __LINE__, __x);			\
		pr_info("Pass: %u Run:%u\n",				\
			atomic_read(&maple_tree_tests_passed),		\
			atomic_read(&maple_tree_tests_run));		\
		dump_stack();						\
	} else {							\
		atomic_inc(&maple_tree_tests_passed);			\
	}								\
} while (0)
#endif

/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_LOAD */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */
/* #define BENCH_MAS_FOR_EACH */
/* #define BENCH_MAS_PREV */

#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif

#define mas_is_none(x)
#define mas_is_overflow(x)
#define mas_is_underflow(x)

static int __init mtree_insert_index(struct maple_tree *mt,
				     unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}

static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
{
	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
}

static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}

static int __init mtree_test_store_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}

static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
				void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}

static int __init mtree_test_insert_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}

static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}

static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}

#if defined(CONFIG_64BIT)
static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{}

static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{}
#endif
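
/*
 * Hedged usage sketch (an illustrative addition, not one of the original
 * tests): mtree_alloc_range() searches [min, max] for a free gap of
 * @size and stores @entry there, lowest address first.  The tree must be
 * initialized with MT_FLAGS_ALLOC_RANGE; the constants are arbitrary.
 */
static __maybe_unused void __init example_alloc_range(void)
{
	struct maple_tree mt;
	unsigned long start;
	int ret;

	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE);
	ret = mtree_alloc_range(&mt, &start, xa_mk_value(1), 0x1000,
				0, 0x100000, GFP_KERNEL);
	MT_BUG_ON(&mt, ret != 0);	/* first fit lands at the low end */
	MT_BUG_ON(&mt, start != 0);
	mtree_destroy(&mt);
}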

static noinline void __init check_load(struct maple_tree *mt,
				       unsigned long index, void *ptr)
{}

static noinline void __init check_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{}

static noinline void __init check_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{}

static noinline void __init check_insert(struct maple_tree *mt,
					 unsigned long index, void *ptr)
{}

static noinline void __init check_dup_insert(struct maple_tree *mt,
				      unsigned long index, void *ptr)
{}

static noinline void __init check_index_load(struct maple_tree *mt,
					     unsigned long index)
{}

static inline __init int not_empty(struct maple_node *node)
{}

static noinline void __init check_rev_seq(struct maple_tree *mt,
					  unsigned long max, bool verbose)
{}

static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{}

static noinline void __init check_lb_not_empty(struct maple_tree *mt)
{}

static noinline void __init check_lower_bound_split(struct maple_tree *mt)
{}

static noinline void __init check_upper_bound_split(struct maple_tree *mt)
{}

static noinline void __init check_mid_split(struct maple_tree *mt)
{}

static noinline void __init check_rev_find(struct maple_tree *mt)
{}

static noinline void __init check_find(struct maple_tree *mt)
{}

static noinline void __init check_find_2(struct maple_tree *mt)
{}

#if defined(CONFIG_64BIT)
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{}

static noinline void __init check_alloc_range(struct maple_tree *mt)
{}
#endif

static noinline void __init check_ranges(struct maple_tree *mt)
{}

static noinline void __init check_next_entry(struct maple_tree *mt)
{}

static noinline void __init check_prev_entry(struct maple_tree *mt)
{}

static noinline void __init check_root_expand(struct maple_tree *mt)
{}

static noinline void __init check_gap_combining(struct maple_tree *mt)
{}

static noinline void __init check_node_overwrite(struct maple_tree *mt)
{}

#if defined(BENCH_SLOT_STORE)
static noinline void __init bench_slot_store(struct maple_tree *mt)
{
	int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
				  GFP_KERNEL);
	}
}
#endif

#if defined(BENCH_NODE_STORE)
static noinline void __init bench_node_store(struct maple_tree *mt)
{
	int i, overwrite = 76, max = 240, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, overwrite,  overwrite + 15,
				  xa_mk_value(overwrite), GFP_KERNEL);

		overwrite += 5;
		if (overwrite >= 135)
			overwrite = 76;
	}
}
#endif

#if defined(BENCH_AWALK)
static noinline void __init bench_awalk(struct maple_tree *mt)
{
	int i, max = 2500, count = 50000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_empty_area_rev(&mas, 0, 2000, 10);
		mas_reset(&mas);
	}
}
#endif

#if defined(BENCH_WALK)
static noinline void __init bench_walk(struct maple_tree *mt)
{
	int i, max = 2500, count = 550000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_walk(&mas);
		mas_reset(&mas);
	}
}
#endif

#if defined(BENCH_LOAD)
static noinline void __init bench_load(struct maple_tree *mt)
{
	int i, max = 2500, count = 550000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++)
		mtree_load(mt, 1470);
}
#endif

#if defined(BENCH_MT_FOR_EACH)
static noinline void __init bench_mt_for_each(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500, index = 0;
	void *entry;

	for (i = 0; i < max; i += 5)
		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		unsigned long j = 0;

		mt_for_each(mt, entry, index, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j += 5;
		}

		index = 0;
	}
}
#endif

#if defined(BENCH_MAS_FOR_EACH)
static noinline void __init bench_mas_for_each(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500;
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i < max; i += 5) {
		int gap = 4;

		if (i % 30 == 0)
			gap = 3;
		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
	}

	rcu_read_lock();
	for (i = 0; i < count; i++) {
		unsigned long j = 0;

		mas_for_each(&mas, entry, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j += 5;
		}
		mas_set(&mas, 0);
	}
	rcu_read_unlock();
}
#endif

#if defined(BENCH_MAS_PREV)
static noinline void __init bench_mas_prev(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500;
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i < max; i += 5) {
		int gap = 4;

		if (i % 30 == 0)
			gap = 3;
		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
	}

	rcu_read_lock();
	for (i = 0; i < count; i++) {
		unsigned long j = 2495;

		mas_set(&mas, ULONG_MAX);
		while ((entry = mas_prev(&mas, 0)) != NULL) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j -= 5;
		}
	}
	rcu_read_unlock();
}
#endif

/* check_forking - simulate the kernel forking sequence with the tree. */
static noinline void __init check_forking(void)
{}

static noinline void __init check_iteration(struct maple_tree *mt)
{}

static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
{}

#if defined(BENCH_FORK)
static noinline void __init bench_forking(void)
{
	struct maple_tree mt, newmt;
	int i, nr_entries = 134, nr_fork = 80000, ret;
	void *val;
	MA_STATE(mas, &mt, 0, 0);
	MA_STATE(newmas, &newmt, 0, 0);
	struct rw_semaphore mt_lock, newmt_lock;

	init_rwsem(&mt_lock);
	init_rwsem(&newmt_lock);

	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
	mt_set_external_lock(&mt, &mt_lock);

	down_write(&mt_lock);
	for (i = 0; i <= nr_entries; i++) {
		mas_set_range(&mas, i*10, i*10 + 5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	for (i = 0; i < nr_fork; i++) {
		mt_init_flags(&newmt,
			      MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
		mt_set_external_lock(&newmt, &newmt_lock);

		down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
		ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
		if (ret) {
			pr_err("OOM!");
			BUG_ON(1);
		}

		mas_set(&newmas, 0);
		mas_for_each(&newmas, val, ULONG_MAX)
			mas_store(&newmas, val);

		mas_destroy(&newmas);
		mt_validate(&newmt);
		__mt_destroy(&newmt);
		up_write(&newmt_lock);
	}
	mas_destroy(&mas);
	__mt_destroy(&mt);
	up_write(&mt_lock);
}
#endif

static noinline void __init next_prev_test(struct maple_tree *mt)
{}

/* Test spanning writes that require balancing right sibling or right cousin */
static noinline void __init check_spanning_relatives(struct maple_tree *mt)
{}
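
/*
 * Illustrative sketch (an addition, not one of the original tests): build
 * a tree deep enough to have internal nodes, then issue a single store
 * whose range crosses several leaves.  Such a spanning write is what
 * forces rebalancing against the right sibling or cousin.  The constants
 * are arbitrary.
 */
static __maybe_unused void __init example_spanning_store(struct maple_tree *mt)
{
	unsigned long i;

	/* Roughly 500 disjoint ranges gives a multi-level tree. */
	for (i = 0; i <= 500; i++)
		mtree_store_range(mt, i * 10, i * 10 + 5, xa_mk_value(i),
				  GFP_KERNEL);

	/* One store overwriting many existing entries spans nodes. */
	mtree_store_range(mt, 1205, 4005, xa_mk_value(9), GFP_KERNEL);
}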

static noinline void __init check_fuzzer(struct maple_tree *mt)
{}

/* duplicate the tree with a specific gap */
static noinline void __init check_dup_gaps(struct maple_tree *mt,
				    unsigned long nr_entries, bool zero_start,
				    unsigned long gap)
{}

/* Duplicate many sizes of trees.  Mainly to test expected entry values */
static noinline void __init check_dup(struct maple_tree *mt)
{}
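
/*
 * Minimal duplication sketch (a hypothetical helper, not the __mt_dup()
 * fast path used by fork): walk the source tree under the RCU read lock
 * and copy every range into the destination.  GFP_ATOMIC because the
 * stores may not sleep inside the RCU read-side section.
 */
static __maybe_unused int __init example_dup_entries(struct maple_tree *old,
						     struct maple_tree *new)
{
	void *entry;
	int ret = 0;
	MA_STATE(mas, old, 0, 0);

	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX) {
		ret = mtree_store_range(new, mas.index, mas.last, entry,
					GFP_ATOMIC);
		if (ret)
			break;
	}
	rcu_read_unlock();

	return ret;
}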

static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
{}

static noinline void __init check_empty_area_window(struct maple_tree *mt)
{}

static noinline void __init check_empty_area_fill(struct maple_tree *mt)
{}

/*
 * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
 *
 * The table below shows the single entry tree (0-0 pointer) and normal tree
 * with nodes.
 *
 * Function	ENTRY	Start		Result		index & last
 *     ┬          ┬       ┬               ┬                ┬
 *     │          │       │               │                └─ the final range
 *     │          │       │               └─ The node value after execution
 *     │          │       └─ The node value before execution
 *     │          └─ If the entry exists or does not exist (DNE)
 *     └─ The function name
 *
 * Function	ENTRY	Start		Result		index & last
 * mas_next()
 *  - after last
 *			Single entry tree at 0-0
 *			------------------------
 *		DNE	MAS_START	MAS_NONE	1 - oo
 *		DNE	MAS_PAUSE	MAS_NONE	1 - oo
 *		DNE	MAS_ROOT	MAS_NONE	1 - oo
 *			when index = 0
 *		DNE	MAS_NONE	MAS_ROOT	0
 *			when index > 0
 *		DNE	MAS_NONE	MAS_NONE	1 - oo
 *
 *			Normal tree
 *			-----------
 *		exists	MAS_START	active		range
 *		DNE	MAS_START	active		set to last range
 *		exists	MAS_PAUSE	active		range
 *		DNE	MAS_PAUSE	active		set to last range
 *		exists	MAS_NONE	active		range
 *		exists	active		active		range
 *		DNE	active		active		set to last range
 *		ERANGE	active		MAS_OVERFLOW	last range
 *
 * Function	ENTRY	Start		Result		index & last
 * mas_prev()
 * - before index
 *			Single entry tree at 0-0
 *			------------------------
 *				if index > 0
 *		exists	MAS_START	MAS_ROOT	0
 *		exists	MAS_PAUSE	MAS_ROOT	0
 *		exists	MAS_NONE	MAS_ROOT	0
 *
 *				if index == 0
 *		DNE	MAS_START	MAS_NONE	0
 *		DNE	MAS_PAUSE	MAS_NONE	0
 *		DNE	MAS_NONE	MAS_NONE	0
 *		DNE	MAS_ROOT	MAS_NONE	0
 *
 *			Normal tree
 *			-----------
 *		exists	MAS_START	active		range
 *		DNE	MAS_START	active		set to min
 *		exists	MAS_PAUSE	active		range
 *		DNE	MAS_PAUSE	active		set to min
 *		exists	MAS_NONE	active		range
 *		DNE	MAS_NONE	MAS_NONE	set to min
 *		any	MAS_ROOT	MAS_NONE	0
 *		exists	active		active		range
 *		DNE	active		active		last range
 *		ERANGE	active		MAS_UNDERFLOW	last range
 *
 * Function	ENTRY	Start		Result		index & last
 * mas_find()
 *  - at index or next
 *			Single entry tree at 0-0
 *			------------------------
 *				if index >  0
 *		DNE	MAS_START	MAS_NONE	0
 *		DNE	MAS_PAUSE	MAS_NONE	0
 *		DNE	MAS_ROOT	MAS_NONE	0
 *		DNE	MAS_NONE	MAS_NONE	1
 *				if index ==  0
 *		exists	MAS_START	MAS_ROOT	0
 *		exists	MAS_PAUSE	MAS_ROOT	0
 *		exists	MAS_NONE	MAS_ROOT	0
 *
 *			Normal tree
 *			-----------
 *		exists	MAS_START	active		range
 *		DNE	MAS_START	active		set to max
 *		exists	MAS_PAUSE	active		range
 *		DNE	MAS_PAUSE	active		set to max
 *		exists	MAS_NONE	active		range (start at last)
 *		exists	active		active		range
 *		DNE	active		active		last range (max < last)
 *
 * Function	ENTRY	Start		Result		index & last
 * mas_find_rev()
 *  - at index or before
 *			Single entry tree at 0-0
 *			------------------------
 *				if index >  0
 *		exists	MAS_START	MAS_ROOT	0
 *		exists	MAS_PAUSE	MAS_ROOT	0
 *		exists	MAS_NONE	MAS_ROOT	0
 *				if index ==  0
 *		DNE	MAS_START	MAS_NONE	0
 *		DNE	MAS_PAUSE	MAS_NONE	0
 *		DNE	MAS_NONE	MAS_NONE	0
 *		DNE	MAS_ROOT	MAS_NONE	0
 *
 *			Normal tree
 *			-----------
 *		exists	MAS_START	active		range
 *		DNE	MAS_START	active		set to min
 *		exists	MAS_PAUSE	active		range
 *		DNE	MAS_PAUSE	active		set to min
 *		exists	MAS_NONE	active		range (start at index)
 *		exists	active		active		range
 *		DNE	active		active		last range (min > index)
 *
 * Function	ENTRY	Start		Result		index & last
 * mas_walk()
 * - Look up index
 *			Single entry tree at 0-0
 *			------------------------
 *				if index >  0
 *		DNE	MAS_START	MAS_ROOT	1 - oo
 *		DNE	MAS_PAUSE	MAS_ROOT	1 - oo
 *		DNE	MAS_NONE	MAS_ROOT	1 - oo
 *		DNE	MAS_ROOT	MAS_ROOT	1 - oo
 *				if index ==  0
 *		exists	MAS_START	MAS_ROOT	0
 *		exists	MAS_PAUSE	MAS_ROOT	0
 *		exists	MAS_NONE	MAS_ROOT	0
 *		exists	MAS_ROOT	MAS_ROOT	0
 *
 *			Normal tree
 *			-----------
 *		exists	MAS_START	active		range
 *		DNE	MAS_START	active		range of NULL
 *		exists	MAS_PAUSE	active		range
 *		DNE	MAS_PAUSE	active		range of NULL
 *		exists	MAS_NONE	active		range
 *		DNE	MAS_NONE	active		range of NULL
 *		exists	active		active		range
 *		DNE	active		active		range of NULL
 */

static noinline void __init check_state_handling(struct maple_tree *mt)
{}
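
/*
 * A few single-entry rows from the table above, as a hedged sketch (an
 * illustrative addition, not one of the original tests): after walking
 * to the 0-0 entry, mas_next() finds nothing, returns NULL, and leaves
 * the state at MAS_NONE covering 1 - oo.
 */
static __maybe_unused void __init example_single_entry_states(struct maple_tree *mt)
{
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);

	rcu_read_lock();
	entry = mas_walk(&mas);			/* exists: MAS_ROOT, 0 */
	MT_BUG_ON(mt, entry != xa_mk_value(0));

	entry = mas_next(&mas, ULONG_MAX);	/* DNE: MAS_NONE, 1 - oo */
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);
	rcu_read_unlock();
}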

static noinline void __init alloc_cyclic_testing(struct maple_tree *mt)
{}
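
/*
 * Hedged sketch of cyclic allocation (an addition, not the original test
 * body), assuming the mtree_alloc_cyclic() signature from recent kernels:
 * each call stores @entry at the lowest free index in [1, 65535] at or
 * after the hint in @next, wrapping when the hint passes the upper bound.
 */
static __maybe_unused void __init example_alloc_cyclic(void)
{
	struct maple_tree mt;
	unsigned long location, next = 0;
	int i, ret;

	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i < 100; i++) {
		ret = mtree_alloc_cyclic(&mt, &location, xa_mk_value(i),
					 1, 65535, &next, GFP_KERNEL);
		MT_BUG_ON(&mt, ret != 0);
		MT_BUG_ON(&mt, location != i + 1);	/* ids 1, 2, 3, ... */
	}
	mtree_destroy(&mt);
}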

static DEFINE_MTREE(tree);
static int __init maple_tree_seed(void)
{}

static void __exit maple_tree_harvest(void)
{}

module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <[email protected]>");
MODULE_DESCRIPTION("maple tree API test module");
MODULE_LICENSE("GPL");