/*
 * $Id: rbtree.h,v 1.3 2001/05/01 07:52:20 mast Exp $
 */
#ifndef RBTREE_H
#define RBTREE_H
|
881484 | 2001-05-01 | Martin Stjernholm | | #include "array.h"
|
c550c2 | 2001-04-30 | Martin Stjernholm | |
/* Common header of every red-black tree node type.  prev and next are
 * either child pointers or thread pointers, depending on the
 * RB_THREAD_PREV/RB_THREAD_NEXT bits in flags (see LOW_RB_TRAVERSE
 * below for how they are interpreted). */
struct rb_node_hdr {
struct rb_node_hdr *prev, *next;
/* RB_* flag bits defined below. */
unsigned INT16 flags;
};
|
/* NOTE: the index svalue's type field carries tree flag bits (see
 * RB_IND_FLAG_MASK below), so don't use it directly; use
 * assign_rb_node_ind_no_free() or similar instead. */
|
c550c2 | 2001-04-30 | Martin Stjernholm | |
/* Node carrying an index svalue only. */
struct rb_node_ind {
struct rb_node_ind *prev, *next;
struct svalue ind;
};
/* Node carrying both an index and a value svalue. */
struct rb_node_indval {
struct rb_node_indval *prev, *next;
struct svalue ind, val;
};
/* Any node can be viewed through any of these members; the leading
 * members line up with struct rb_node_hdr. */
union rb_node {
struct rb_node_hdr h;
struct rb_node_ind i;
struct rb_node_indval iv;
};
/* Flag bits kept in rb_node_hdr.flags.  NOTE(review): the same bits
 * appear to be stored in the type field of the index svalue as well --
 * assign_rb_node_ind_no_free() masks them off with RB_IND_FLAG_MASK. */
#define RB_RED 0x2000
/* prev is a thread pointer (inorder predecessor), not a child. */
#define RB_THREAD_PREV 0x4000
/* next is a thread pointer (inorder successor), not a child. */
#define RB_THREAD_NEXT 0x8000
#define RB_FLAG_MASK 0xe000
#define RB_FLAG_MARKER 0x1000
#define RB_IND_FLAG_MASK 0xf000
|
881484 | 2001-05-01 | Martin Stjernholm | | PMOD_EXPORT int rb_ind_default_cmp (struct svalue *key, union rb_node *node);
typedef int low_rb_cmp_fn (void *key, struct rb_node_hdr *node);
|
5dc193 | 2001-05-01 | Martin Stjernholm | | PMOD_EXPORT union rb_node *rb_find_eq_extcmp (
union rb_node *tree, struct svalue *key, struct svalue *cmp_less);
PMOD_EXPORT union rb_node *rb_find_lt_extcmp (
union rb_node *tree, struct svalue *key, struct svalue *cmp_less);
PMOD_EXPORT union rb_node *rb_find_gt_extcmp (
union rb_node *tree, struct svalue *key, struct svalue *cmp_less);
PMOD_EXPORT union rb_node *rb_find_le_extcmp (
union rb_node *tree, struct svalue *key, struct svalue *cmp_less);
PMOD_EXPORT union rb_node *rb_find_ge_extcmp (
union rb_node *tree, struct svalue *key, struct svalue *cmp_less);
|
/* Inorder iteration over plain rb_node_hdr trees. */
PMOD_EXPORT struct rb_node_hdr *rb_first (struct rb_node_hdr *tree);
PMOD_EXPORT struct rb_node_hdr *rb_last (struct rb_node_hdr *tree);
PMOD_EXPORT struct rb_node_hdr *rb_prev (struct rb_node_hdr *node);
PMOD_EXPORT struct rb_node_hdr *rb_next (struct rb_node_hdr *node);

/* Upcasts to rb_node_hdr.  With PIKE_DEBUG the inline functions give
 * the compiler a chance to type-check the argument; otherwise they
 * are plain casts. */
#ifdef PIKE_DEBUG
static inline struct rb_node_hdr *rb_node_ind_check (struct rb_node_ind *node)
{return (struct rb_node_hdr *) node;}
static inline struct rb_node_hdr *rb_node_indval_check (struct rb_node_indval *node)
{return (struct rb_node_hdr *) node;}
#else
#define rb_node_ind_check(node) ((struct rb_node_hdr *) (node))
#define rb_node_indval_check(node) ((struct rb_node_hdr *) (node))
#endif

/* Dispatches a find operation: uses the _extcmp variant when cmp_less
 * is nonzero, otherwise the low_ variant with rb_ind_default_cmp.
 * type is the node struct tag, func the find function stem. */
#define RB_DECL_FIND_FUNC(type, func, tree, key, cmp_less) \
  ((struct type *) debug_malloc_pass ( \
    (cmp_less) ? (struct rb_node_hdr *) PIKE_CONCAT (func, _extcmp) ( \
      (union rb_node *) debug_malloc_pass (PIKE_CONCAT (type, _check) (tree)), \
      dmalloc_touch (struct svalue *, key), \
      (cmp_less)) \
    : PIKE_CONCAT (low_, func) ( \
      (struct rb_node_hdr *) debug_malloc_pass (PIKE_CONCAT (type, _check) (tree)), \
      (low_rb_cmp_fn *) rb_ind_default_cmp, \
      dmalloc_touch (struct svalue *, key))))

/* Typed wrapper around rb_first/rb_last/rb_prev/rb_next. */
#define RB_DECL_STEP_FUNC(type, func, node) \
  ((struct type *) debug_malloc_pass ( \
    func ((struct rb_node_hdr *) debug_malloc_pass (PIKE_CONCAT (type, _check) (node)))))
|
c550c2 | 2001-04-30 | Martin Stjernholm | | |
/* find_eq:
 * Returns an arbitrary entry which has the given index, or zero
 * if none exists.
*
* find_lt, find_gt, find_le, find_ge:
* find_lt and find_le returns the biggest entry which satisfy the
* condition, and vice versa for the other two. This means that
* e.g. rb_next when used on the returned node from find_le never
* returns an entry with the same index.
*
* If cmp_less is nonzero, it's a function pointer used as `< to
* compare the entries. If it's zero the internal set order is used.
* All destructive operations might change the tree root.
*/
/* Inserts ind into the tree.  NOTE(review): presumably returns the
 * existing node when an equal index is already present (cf. the
 * separate _add below) -- confirm against rbtree.c. */
PMOD_EXPORT struct rb_node_ind *rb_ind_insert (struct rb_node_ind **tree,
struct svalue *ind,
struct svalue *cmp_less);
/* Adds ind unconditionally, even if an equal index already exists. */
PMOD_EXPORT struct rb_node_ind *rb_ind_add (struct rb_node_ind **tree,
struct svalue *ind,
struct svalue *cmp_less);
/* Removes an entry with the given index; returns nonzero on success.
 * May change *tree (see the comment above about destructive ops). */
PMOD_EXPORT int rb_ind_delete (struct rb_node_ind **tree,
struct svalue *ind,
struct svalue *cmp_less);
/* Returns a copy of the whole tree. */
PMOD_EXPORT struct rb_node_ind *rb_ind_copy (struct rb_node_ind *tree);
/* Frees the whole tree. */
PMOD_EXPORT void rb_ind_free (struct rb_node_ind *tree);
|
/* Typed find operations on rb_node_ind trees.  cmp_less is an
 * external `< predicate, or zero for the internal set order; see the
 * find_lt/gt/le/ge semantics documented above. */
#define rb_ind_find_eq(tree, key, cmp_less) RB_DECL_FIND_FUNC (rb_node_ind, \
  rb_find_eq, tree, key, cmp_less)
#define rb_ind_find_lt(tree, key, cmp_less) RB_DECL_FIND_FUNC (rb_node_ind, \
  rb_find_lt, tree, key, cmp_less)
#define rb_ind_find_gt(tree, key, cmp_less) RB_DECL_FIND_FUNC (rb_node_ind, \
  rb_find_gt, tree, key, cmp_less)
#define rb_ind_find_le(tree, key, cmp_less) RB_DECL_FIND_FUNC (rb_node_ind, \
  rb_find_le, tree, key, cmp_less)
#define rb_ind_find_ge(tree, key, cmp_less) RB_DECL_FIND_FUNC (rb_node_ind, \
  rb_find_ge, tree, key, cmp_less)

/* Typed inorder iteration over rb_node_ind trees. */
#define rb_ind_first(tree) RB_DECL_STEP_FUNC (rb_node_ind, rb_first, tree)
#define rb_ind_last(tree) RB_DECL_STEP_FUNC (rb_node_ind, rb_last, tree)
#define rb_ind_prev(tree) RB_DECL_STEP_FUNC (rb_node_ind, rb_prev, tree)
#define rb_ind_next(tree) RB_DECL_STEP_FUNC (rb_node_ind, rb_next, tree)
|
c550c2 | 2001-04-30 | Martin Stjernholm | |
/* Same operations as the rb_ind_* family above, but for nodes that
 * carry a value svalue besides the index. */
PMOD_EXPORT struct rb_node_indval *rb_indval_insert (struct rb_node_indval **tree,
struct svalue *ind,
struct svalue *val,
struct svalue *cmp_less);
PMOD_EXPORT struct rb_node_indval *rb_indval_add (struct rb_node_indval **tree,
struct svalue *ind,
struct svalue *val,
struct svalue *cmp_less);
/* Adds a new entry directly after the given node.  NOTE(review):
 * presumably the caller must keep the ordering consistent -- confirm
 * against rbtree.c. */
PMOD_EXPORT struct rb_node_indval *rb_indval_add_after (struct rb_node_indval **tree,
struct rb_node_indval *node,
struct svalue *ind,
struct svalue *val,
struct svalue *cmp_less);
PMOD_EXPORT int rb_indval_delete (struct rb_node_indval **tree,
struct svalue *ind,
struct svalue *cmp_less);
/* Unlinks a specific node rather than searching by index. */
PMOD_EXPORT struct rb_node_indval *rb_indval_delete_node (struct rb_node_indval **tree,
struct rb_node_indval *node,
struct svalue *cmp_less);
PMOD_EXPORT struct rb_node_indval *rb_indval_copy (struct rb_node_indval *tree);
PMOD_EXPORT void rb_indval_free (struct rb_node_indval *tree);
|
/* Typed find operations on rb_node_indval trees; same semantics as
 * the rb_ind_find_* macros above. */
#define rb_indval_find_eq(tree, key, cmp_less) RB_DECL_FIND_FUNC (rb_node_indval, \
  rb_find_eq, tree, key, cmp_less)
#define rb_indval_find_lt(tree, key, cmp_less) RB_DECL_FIND_FUNC (rb_node_indval, \
  rb_find_lt, tree, key, cmp_less)
#define rb_indval_find_gt(tree, key, cmp_less) RB_DECL_FIND_FUNC (rb_node_indval, \
  rb_find_gt, tree, key, cmp_less)
#define rb_indval_find_le(tree, key, cmp_less) RB_DECL_FIND_FUNC (rb_node_indval, \
  rb_find_le, tree, key, cmp_less)
#define rb_indval_find_ge(tree, key, cmp_less) RB_DECL_FIND_FUNC (rb_node_indval, \
  rb_find_ge, tree, key, cmp_less)

/* Typed inorder iteration over rb_node_indval trees. */
#define rb_indval_first(tree) RB_DECL_STEP_FUNC (rb_node_indval, rb_first, tree)
#define rb_indval_last(tree) RB_DECL_STEP_FUNC (rb_node_indval, rb_last, tree)
#define rb_indval_prev(tree) RB_DECL_STEP_FUNC (rb_node_indval, rb_prev, tree)
#define rb_indval_next(tree) RB_DECL_STEP_FUNC (rb_node_indval, rb_next, tree)
|
c550c2 | 2001-04-30 | Martin Stjernholm | |
/* Copies node's index svalue into *to without freeing the previous
 * contents of *to.  Strips the RB_IND_FLAG_MASK bits from the type
 * field and adds a reference. */
#define assign_rb_node_ind_no_free(to, node) do { \
struct svalue *_rb_node_to = (to); \
*_rb_node_to = (node)->ind; \
_rb_node_to->type &= ~RB_IND_FLAG_MASK; \
add_ref_svalue (_rb_node_to); \
} while (0)
/* Like assign_rb_node_ind_no_free but frees *to first. */
#define assign_rb_node_ind(to, node) do { \
struct svalue *_rb_node_to2 = (to); \
free_svalue (_rb_node_to2); \
assign_rb_node_ind_no_free (_rb_node_to2, (node)); \
} while (0)
/* Pushes the node's index svalue onto the Pike stack. */
#define push_rb_node_ind(node) do { \
assign_rb_node_ind_no_free (Pike_sp, (node)); \
Pike_sp++; \
} while (0)
/* Evaluates to the node's index with the flag bits cleared, using var
 * as scratch storage.  Does not add a reference. */
#define use_rb_node_ind(node, var) \
(var = (node)->ind, var.type &= ~RB_IND_FLAG_MASK, var)
/* Number of node pointers in each rbstack slice. */
#define STACK_SLICE_SIZE 20
|
5dc193 | 2001-05-01 | Martin Stjernholm | | struct rbstack_slice
|
c550c2 | 2001-04-30 | Martin Stjernholm | | {
|
5dc193 | 2001-05-01 | Martin Stjernholm | | struct rbstack_slice *up;
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *stack[STACK_SLICE_SIZE];
};
|
5dc193 | 2001-05-01 | Martin Stjernholm | | struct rbstack_ptr
{
struct rbstack_slice *slice;
size_t ssp;
};
|
c550c2 | 2001-04-30 | Martin Stjernholm | |
|
/* Out-of-line helpers for the RBSTACK_* macros below; only called
 * when a slice boundary is crossed. */
PMOD_EXPORT void rbstack_push (struct rbstack_ptr *rbstack,
			       struct rb_node_hdr *node);
PMOD_EXPORT void rbstack_pop (struct rbstack_ptr *rbstack);
PMOD_EXPORT void rbstack_up (struct rbstack_ptr *rbstack);
PMOD_EXPORT void rbstack_free (struct rbstack_ptr *rbstack);

/* Declares and initializes an empty rbstack whose first slice lives
 * on the C stack. */
#define RBSTACK_INIT(rbstack) \
  struct rbstack_slice PIKE_CONCAT3 (_, rbstack, _top_); \
  struct rbstack_ptr rbstack = { \
    (PIKE_CONCAT3 (_, rbstack, _top_).up = 0, \
     &PIKE_CONCAT3 (_, rbstack, _top_)), \
    0 \
  }

/* Pushes node; calls rbstack_push when the current slice is full. */
#define RBSTACK_PUSH(rbstack, node) do { \
    if ((rbstack).ssp < STACK_SLICE_SIZE) \
      (rbstack).slice->stack[(rbstack).ssp++] = (node); \
    else rbstack_push (&(rbstack), node); \
  } while (0)

/* Pops the top entry into node; node becomes 0 when the stack is
 * empty. */
#define RBSTACK_POP(rbstack, node) do { \
    if ((rbstack).ssp) { \
      (node) = (rbstack).slice->stack[--(rbstack).ssp]; \
      if (!(rbstack).ssp && (rbstack).slice->up) \
	rbstack_pop (&(rbstack)); \
    } \
    else (node) = 0; \
  } while (0)

/* Pops and discards the top entry, if any. */
#define RBSTACK_POP_IGNORE(rbstack) do { \
    if ((rbstack).ssp && !--(rbstack).ssp && (rbstack).slice->up) \
      rbstack_pop (&(rbstack)); \
  } while (0)

/* Like RBSTACK_POP but crosses slice boundaries with rbstack_up
 * instead of rbstack_pop. */
#define RBSTACK_UP(rbstack, node) do { \
    if ((rbstack).ssp) { \
      (node) = (rbstack).slice->stack[--(rbstack).ssp]; \
      if (!(rbstack).ssp && (rbstack).slice->up) rbstack_up (&(rbstack)); \
    } \
    else (node) = 0; \
  } while (0)

/* Returns the top entry without popping, or 0 if the stack is empty. */
#define RBSTACK_PEEK(rbstack) \
  ((rbstack).ssp ? (rbstack).slice->stack[(rbstack).ssp - 1] : 0)

/* Frees any chained slices and empties the stack. */
#define RBSTACK_FREE(rbstack) do { \
    if ((rbstack).ssp) { \
      if ((rbstack).slice->up) rbstack_free (&(rbstack)); \
      (rbstack).ssp = 0; \
    } \
  } while (0)

/* Frees the stack and, if it was nonempty, sets node to entry 0 of
 * the remaining slice (used to recover the tree root). */
#define RBSTACK_FREE_SET_ROOT(rbstack, node) do { \
    if ((rbstack).ssp) { \
      if ((rbstack).slice->up) rbstack_free (&(rbstack)); \
      (node) = (rbstack).slice->stack[0]; \
    } \
  } while (0)
|
/* Nonrecursive inorder traversal over a threaded tree, using rbstack
 * to record the path.  The statement arguments run at these points:
 *   push    - entering a node (before either subtree),
 *   p_leaf  - the prev pointer is a thread (no left subtree),
 *   p_sub   - about to descend into the left subtree,
 *   between - after the left side, before the right side,
 *   n_leaf  - the next pointer is a thread (no right subtree),
 *   n_sub   - about to descend into the right subtree,
 *   pop     - leaving a node.
 * label must be unique within the enclosing function since it is used
 * to build goto labels. */
#define LOW_RB_TRAVERSE(label, rbstack, node, push, p_leaf, p_sub, between, n_leaf, n_sub, pop) \
  do { \
    struct rb_node_hdr *low_rb_last_; \
    if (node) { \
      PIKE_CONCAT (enter_, label): \
      {push;} \
      if ((node)->flags & RB_THREAD_PREV) \
	{p_leaf;} \
      else { \
	{p_sub;} \
	RBSTACK_PUSH (rbstack, node); \
	(node) = (node)->prev; \
	goto PIKE_CONCAT (enter_, label); \
      } \
      PIKE_CONCAT (between_, label): \
      {between;} \
      if ((node)->flags & RB_THREAD_NEXT) \
	{n_leaf;} \
      else { \
	{n_sub;} \
	RBSTACK_PUSH (rbstack, node); \
	(node) = (node)->next; \
	goto PIKE_CONCAT (enter_, label); \
      } \
      PIKE_CONCAT (leave_, label): \
      while (1) { \
	{pop;} \
	low_rb_last_ = (node); \
	RBSTACK_POP (rbstack, node); \
	if (!(node)) break; \
	if (low_rb_last_ == (node)->prev) \
	  goto PIKE_CONCAT (between_, label); \
      } \
    } \
  } while (0)
|
/* Like LOW_RB_TRAVERSE but prints every traversal step to stderr,
 * indented by the current depth.  NOTE(review): the %*s width
 * argument is size_t while %* expects int -- debug-only, but worth
 * a cast if this ever trips a sanitizer. */
#define LOW_RB_DEBUG_TRAVERSE(label, rbstack, node, push, p_leaf, p_sub, between, n_leaf, n_sub, pop) \
  do { \
    size_t PIKE_CONCAT (depth_, label) = 0; \
    LOW_RB_TRAVERSE( \
      label, rbstack, node, \
      fprintf (stderr, "%*s%p enter\n", \
	       PIKE_CONCAT (depth_, label)++, "", node); {push;}, \
      fprintf (stderr, "%*s%p prev leaf\n", \
	       PIKE_CONCAT (depth_, label), "", node); {p_leaf;}, \
      fprintf (stderr, "%*s%p prev subtree\n", \
	       PIKE_CONCAT (depth_, label), "", node); {p_sub;}, \
      fprintf (stderr, "%*s%p between\n", \
	       PIKE_CONCAT (depth_, label) - 1, "", node); {between;}, \
      fprintf (stderr, "%*s%p next leaf\n", \
	       PIKE_CONCAT (depth_, label), "", node); {n_leaf;}, \
      fprintf (stderr, "%*s%p next subtree\n", \
	       PIKE_CONCAT (depth_, label), "", node); {n_sub;}, \
      fprintf (stderr, "%*s%p leave\n", \
	       --PIKE_CONCAT (depth_, label), "", node); {pop;}); \
  } while (0)
|
881484 | 2001-05-01 | Martin Stjernholm | |
|
c550c2 | 2001-04-30 | Martin Stjernholm | | #define LOW_RB_FIND(node, cmp, got_lt, got_eq, got_gt) \
do { \
int cmp_res; \
while (1) { \
DO_IF_DEBUG (if (!node) fatal ("Recursing into null node.\n")); \
{cmp;} \
if (cmp_res > 0) { \
if ((node)->flags & RB_THREAD_NEXT) { \
{got_lt;} \
break; \
} \
(node) = (node)->next; \
} \
else if (cmp_res < 0) { \
if ((node)->flags & RB_THREAD_PREV) { \
{got_gt;} \
break; \
} \
(node) = (node)->prev; \
} \
else { \
{got_eq;} \
break; \
} \
} \
} while (0)
|
/* Like LOW_RB_FIND but records the search path on rbstack, so the
 * found position can be used with the link/unlink functions. */
#define LOW_RB_TRACK(rbstack, node, cmp, got_lt, got_eq, got_gt) \
  LOW_RB_FIND (node, { \
    RBSTACK_PUSH (rbstack, node); \
    {cmp;} \
  }, got_lt, got_eq, got_gt)

/* Steps node, which must be on top of rbstack, to its inorder
 * successor and updates the stack to match.  Past the last node the
 * successor thread is 0, which pops the stack empty and leaves node
 * zero. */
#define LOW_RB_TRACK_NEXT(rbstack, node) \
  do { \
    DO_IF_DEBUG ( \
      if (node != RBSTACK_PEEK (rbstack)) \
	fatal ("Given node is not on top of stack.\n"); \
    ); \
    if (node->flags & RB_THREAD_NEXT) { \
      /* Successor is an ancestor: pop until it is on top. */ \
      struct rb_node_hdr *low_rb_next_ = node->next; \
      while ((node = RBSTACK_PEEK (rbstack)) != low_rb_next_) \
	RBSTACK_POP_IGNORE (rbstack); \
    } \
    else { \
      /* Successor is the leftmost node in the right subtree. */ \
      node = node->next; \
      while (1) { \
	RBSTACK_PUSH (rbstack, node); \
	if (node->flags & RB_THREAD_PREV) break; \
	node = node->prev; \
      } \
    } \
  } while (0)
|
881484 | 2001-05-01 | Martin Stjernholm | |
/* Generic insert: tracks the position with cmp (which must set
 * cmp_res like in LOW_RB_FIND), runs replace when an equal entry
 * already exists, otherwise runs insert -- which must set node to the
 * new node -- and links it into the tree at the tracked position. */
#define LOW_RB_INSERT(tree, node, cmp, insert, replace) \
  do { \
    int low_rb_ins_type_; \
    RBSTACK_INIT (rbstack); \
    if (((node) = *(tree))) { \
      LOW_RB_TRACK (rbstack, node, cmp, { \
	low_rb_ins_type_ = 1;	/* Got less - link at next. */ \
      }, { \
	low_rb_ins_type_ = 0;	/* Got equal - replace. */ \
	{replace;} \
	RBSTACK_FREE (rbstack); \
      }, {			/* Got greater - link at prev. */ \
	low_rb_ins_type_ = 2; \
      }); \
    } \
    else low_rb_ins_type_ = 3;	/* Empty tree - new root. */ \
    if (low_rb_ins_type_) { \
      DO_IF_DEBUG ((node) = 0); \
      {insert;} \
      switch (low_rb_ins_type_) { \
	case 1: low_rb_link_at_next ((tree), rbstack, (node)); break; \
	case 2: low_rb_link_at_prev ((tree), rbstack, (node)); break; \
	case 3: low_rb_init_root (*(tree) = (node)); break; \
      } \
    } \
  } while (0)
|
c550c2 | 2001-04-30 | Martin Stjernholm | |
typedef void low_rb_move_data_fn (struct rb_node_hdr *to,
struct rb_node_hdr *from);
|
5dc193 | 2001-05-01 | Martin Stjernholm | | typedef struct rb_node_hdr *low_rb_copy_fn (struct rb_node_hdr *node);
typedef void low_rb_free_fn (struct rb_node_hdr *node);
|
c550c2 | 2001-04-30 | Martin Stjernholm | |
void low_rb_init_root (struct rb_node_hdr *new_root);
|
5dc193 | 2001-05-01 | Martin Stjernholm | | void low_rb_link_at_prev (struct rb_node_hdr **tree, struct rbstack_ptr rbstack,
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *new);
|
5dc193 | 2001-05-01 | Martin Stjernholm | | void low_rb_link_at_next (struct rb_node_hdr **tree, struct rbstack_ptr rbstack,
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *new);
struct rb_node_hdr *low_rb_unlink (struct rb_node_hdr **tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | struct rbstack_ptr rbstack,
|
c550c2 | 2001-04-30 | Martin Stjernholm | | low_rb_move_data_fn *move_data);
struct rb_node_hdr *low_rb_insert (struct rb_node_hdr **tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | low_rb_cmp_fn *cmp_fn, void *key,
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *new);
void low_rb_add (struct rb_node_hdr **tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | low_rb_cmp_fn *cmp_fn, void *key,
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *new);
void low_rb_add_after (struct rb_node_hdr **tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | low_rb_cmp_fn *cmp_fn, void *key,
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *new,
struct rb_node_hdr *existing);
struct rb_node_hdr *low_rb_delete (struct rb_node_hdr **tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | low_rb_cmp_fn *cmp_fn, void *key,
low_rb_move_data_fn *move_data_fn);
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *low_rb_delete_node (struct rb_node_hdr **tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | low_rb_cmp_fn *cmp_fn, void *key,
low_rb_move_data_fn *move_data_fn,
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *node);
struct rb_node_hdr *low_rb_copy (struct rb_node_hdr *tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | low_rb_copy_fn *copy_node_fn);
void low_rb_free (struct rb_node_hdr *tree, low_rb_free_fn *free_node_fn);
|
c550c2 | 2001-04-30 | Martin Stjernholm | |
struct rb_node_hdr *low_rb_find_eq (struct rb_node_hdr *tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | low_rb_cmp_fn *cmp_fn, void *key);
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *low_rb_find_lt (struct rb_node_hdr *tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | low_rb_cmp_fn *cmp_fn, void *key);
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *low_rb_find_gt (struct rb_node_hdr *tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | low_rb_cmp_fn *cmp_fn, void *key);
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *low_rb_find_le (struct rb_node_hdr *tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | low_rb_cmp_fn *cmp_fn, void *key);
|
c550c2 | 2001-04-30 | Martin Stjernholm | | struct rb_node_hdr *low_rb_find_ge (struct rb_node_hdr *tree,
|
5dc193 | 2001-05-01 | Martin Stjernholm | | low_rb_cmp_fn *cmp_fn, void *key);
|
c550c2 | 2001-04-30 | Martin Stjernholm | |
#ifdef PIKE_DEBUG
/* Debug helper; presumably verifies the tree invariants -- see
 * rbtree.c. */
void debug_check_rb_tree (struct rb_node_hdr *tree);
#endif
/* Module setup and teardown hooks. */
void init_rbtree (void);
void exit_rbtree (void);
#endif /* RBTREE_H */
|