linux/lib/interval_tree.c

// SPDX-License-Identifier: GPL-2.0-only
#include <linux/interval_tree.h>
#include <linux/interval_tree_generic.h>
#include <linux/compiler.h>
#include <linux/export.h>

#define START(node) ((node)->start)
#define LAST(node)  ((node)->last)

INTERVAL_TREE_DEFINE(struct interval_tree_node, rb,
		     unsigned long, __subtree_last,
		     START, LAST,, interval_tree)

EXPORT_SYMBOL_GPL(interval_tree_insert);
EXPORT_SYMBOL_GPL(interval_tree_remove);
EXPORT_SYMBOL_GPL(interval_tree_iter_first);
EXPORT_SYMBOL_GPL(interval_tree_iter_next);
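
/*
 * Illustrative sketch (not part of this file upstream): typical use of the
 * API generated by INTERVAL_TREE_DEFINE() above. The tree root, node and
 * query range below are hypothetical caller-side names.
 *
 *	struct rb_root_cached root = RB_ROOT_CACHED;
 *	struct interval_tree_node node, *found;
 *
 *	node.start = 10;
 *	node.last = 19;
 *	interval_tree_insert(&node, &root);
 *
 *	walk every node overlapping [0, 100]:
 *	for (found = interval_tree_iter_first(&root, 0, 100); found;
 *	     found = interval_tree_iter_next(found, 0, 100))
 *		do_something(found);
 *
 *	interval_tree_remove(&node, &root);
 */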

#ifdef CONFIG_INTERVAL_TREE_SPAN_ITER
/*
 * Roll nodes[1] into nodes[0] by advancing nodes[1] to the end of a contiguous
 * span of nodes. This makes nodes[0]->last the end of that contiguous used span
 * of indexes that started at the original nodes[1]->start. nodes[1] is now the
 * first node starting the next used span. A hole span is between nodes[0]->last
 * and nodes[1]->start. nodes[1] must be !NULL.
 */
static void
interval_tree_span_iter_next_gap(struct interval_tree_span_iter *state)
{
	struct interval_tree_node *cur = state->nodes[1];

	state->nodes[0] = cur;
	do {
		if (cur->last > state->nodes[0]->last)
			state->nodes[0] = cur;
		cur = interval_tree_iter_next(cur, state->first_index,
					      state->last_index);
	} while (cur && (state->nodes[0]->last >= cur->start ||
			 state->nodes[0]->last + 1 == cur->start));
	state->nodes[1] = cur;
}

void interval_tree_span_iter_first(struct interval_tree_span_iter *iter,
				   struct rb_root_cached *itree,
				   unsigned long first_index,
				   unsigned long last_index)
{
	iter->first_index = first_index;
	iter->last_index = last_index;
	iter->nodes[0] = NULL;
	iter->nodes[1] =
		interval_tree_iter_first(itree, first_index, last_index);
	if (!iter->nodes[1]) {
		/* No nodes intersect the span, whole span is a hole */
		iter->start_hole = first_index;
		iter->last_hole = last_index;
		iter->is_hole = 1;
		return;
	}
	if (iter->nodes[1]->start > first_index) {
		/* Leading hole, rest of the span is used */
		iter->start_hole = first_index;
		iter->last_hole = iter->nodes[1]->start - 1;
		iter->is_hole = 1;
		interval_tree_span_iter_next_gap(iter);
		return;
	}

	/* Starting inside a used span */
	iter->start_used = first_index;
	iter->is_hole = 0;
	interval_tree_span_iter_next_gap(iter);
	iter->last_used = iter->nodes[0]->last;
	if (iter->last_used >= last_index) {
		iter->last_used = last_index;
		iter->nodes[0] = NULL;
		iter->nodes[1] = NULL;
	}
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_first);

void interval_tree_span_iter_next(struct interval_tree_span_iter *iter)
{
	if (!iter->nodes[0] && !iter->nodes[1]) {
		iter->is_hole = -1;
		return;
	}

	if (iter->is_hole) {
		iter->start_used = iter->last_hole + 1;
		iter->last_used = iter->nodes[0]->last;
		if (iter->last_used >= iter->last_index) {
			iter->last_used = iter->last_index;
			iter->nodes[0] = NULL;
			iter->nodes[1] = NULL;
		}
		iter->is_hole = 0;
		return;
	}

	if (!iter->nodes[1]) {
		/* Trailing hole */
		iter->start_hole = iter->nodes[0]->last + 1;
		iter->last_hole = iter->last_index;
		iter->nodes[0] = NULL;
		iter->is_hole = 1;
		return;
	}

	/* must have both nodes[0] and [1], interior hole */
	iter->start_hole = iter->nodes[0]->last + 1;
	iter->last_hole = iter->nodes[1]->start - 1;
	iter->is_hole = 1;
	interval_tree_span_iter_next_gap(iter);
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_next);
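
/*
 * Illustrative sketch (not part of this file upstream): walking the used and
 * hole spans of a range with the two functions above. The tree root and the
 * handle_*() callees are hypothetical caller-side names; a convenience macro
 * with this loop shape, interval_tree_for_each_span(), lives in the matching
 * header, linux/interval_tree.h.
 *
 *	struct interval_tree_span_iter span;
 *
 *	for (interval_tree_span_iter_first(&span, &root, 0, ULONG_MAX);
 *	     !interval_tree_span_iter_done(&span);
 *	     interval_tree_span_iter_next(&span)) {
 *		if (span.is_hole)
 *			handle_hole(span.start_hole, span.last_hole);
 *		else
 *			handle_used(span.start_used, span.last_used);
 *	}
 */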

/*
 * Advance the iterator index to a specific position. The returned used/hole is
 * updated to start at new_index. This is faster than calling
 * interval_tree_span_iter_first() as it can avoid full searches in several
 * cases where the iterator is already set.
 */
void interval_tree_span_iter_advance(struct interval_tree_span_iter *iter,
				     struct rb_root_cached *itree,
				     unsigned long new_index)
{
	if (iter->is_hole == -1)
		return;

	iter->first_index = new_index;
	if (new_index > iter->last_index) {
		iter->is_hole = -1;
		return;
	}

	/* Rely on the union aliasing hole/used */
	if (iter->start_hole <= new_index && new_index <= iter->last_hole) {
		iter->start_hole = new_index;
		return;
	}
	if (new_index == iter->last_hole + 1)
		interval_tree_span_iter_next(iter);
	else
		interval_tree_span_iter_first(iter, itree, new_index,
					      iter->last_index);
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_advance);
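
/*
 * Illustrative sketch (not part of this file upstream): a caller that has
 * finished with everything below a hypothetical 'consumed_end' index can jump
 * the iterator forward instead of restarting the whole walk.
 *
 *	interval_tree_span_iter_advance(&span, &root, consumed_end + 1);
 *	if (!interval_tree_span_iter_done(&span))
 *		the current span now starts at consumed_end + 1;
 *		continue with span.start_hole/span.start_used as above
 */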
#endif