#ifndef RBTREE_LOW_H
#define RBTREE_LOW_H
#include "rbtree.h"
#define STACK_SLICE_SIZE 20
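
/* An rbstack records the path from the root down to a node, since
 * rb_node_hdr (see rbtree.h) apparently carries no parent pointer.
 * The stack is segmented: each rbstack_slice holds STACK_SLICE_SIZE
 * node pointers and links back to the previously filled slice through
 * `up'. An rbstack_ptr addresses the current slice and the next free
 * slot (ssp) within it. */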
struct rbstack_slice
{
#ifdef RB_STATS
size_t depth, maxdepth;
#endif
struct rbstack_slice *up;
struct rb_node_hdr *stack[STACK_SLICE_SIZE];
};
struct rbstack_ptr
{
struct rbstack_slice *slice;
size_t ssp;
};
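
/* Out-of-line helpers for the RBSTACK_* macros below; they handle the
 * uncommon cases where a push or pop crosses a slice boundary
 * (presumably allocating and freeing the extra slices), so the macros
 * themselves stay small. rbstack_insert, rbstack_assign and
 * rbstack_copy manipulate whole stacks, and rbstack_shift appears to
 * rebase the stored node pointers after the nodes themselves have been
 * moved in memory (cf. the oldbase/newbase arguments). */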
void rbstack_low_push (struct rbstack_ptr *rbstack, struct rb_node_hdr *node);
void rbstack_low_pop (struct rbstack_ptr *rbstack);
void rbstack_low_up (struct rbstack_ptr *rbstack);
void rbstack_low_up_to_root (struct rbstack_ptr *rbstack);
void rbstack_low_free (struct rbstack_ptr *rbstack);
void rbstack_do_free (struct rbstack_ptr *rbstack);
void rbstack_insert (struct rbstack_ptr *top, struct rbstack_ptr *pos,
struct rb_node_hdr *node);
void rbstack_assign (struct rbstack_ptr *target, struct rbstack_ptr *source);
void rbstack_copy (struct rbstack_ptr *target, struct rbstack_ptr *source);
void rbstack_shift (struct rbstack_ptr rbstack,
struct rb_node_hdr *oldbase,
struct rb_node_hdr *newbase);
#define RBSTACK_INIT(rbstack) \
struct rbstack_slice PIKE_CONCAT3 (_, rbstack, _top_) = { \
DO_IF_RB_STATS (0 COMMA 0 COMMA) \
NULL, \
{NULL,} \
}; \
struct rbstack_ptr rbstack = { \
NULL, \
0 \
}; \
rbstack.slice = &PIKE_CONCAT3 (_, rbstack, _top_)
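
/* Rough usage sketch of the stack macros (not taken from the Pike
 * sources): declare the stack with RBSTACK_INIT, push nodes on the
 * way down the tree, pop them on the way back up, and always finish
 * with RBSTACK_FREE to release any extra slices:
 *
 *   RBSTACK_INIT (rbstack);
 *   while (...) RBSTACK_PUSH (rbstack, node);
 *   ...
 *   RBSTACK_POP (rbstack, node);
 *   ...
 *   RBSTACK_FREE (rbstack);
 *
 * The macros operate on the stack variable itself, so it has to be
 * declared with RBSTACK_INIT in the same scope. */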
#define RBSTACK_PUSH(rbstack, node) do { \
if ((rbstack).ssp < STACK_SLICE_SIZE) { \
(rbstack).slice->stack[(rbstack).ssp++] = (node); \
} \
else rbstack_low_push (&(rbstack), node); \
DO_IF_RB_STATS ( \
if (++(rbstack).slice->depth > (rbstack).slice->maxdepth) \
(rbstack).slice->maxdepth = (rbstack).slice->depth; \
); \
} while (0)
#define RBSTACK_POP(rbstack, node) do { \
if ((rbstack).ssp) { \
(node) = (rbstack).slice->stack[--(rbstack).ssp]; \
DO_IF_RB_STATS ((rbstack).slice->depth--); \
if (!(rbstack).ssp && (rbstack).slice->up) \
rbstack_low_pop (&(rbstack)); \
} \
else (node) = NULL; \
} while (0)
#define RBSTACK_POP_IGNORE(rbstack) do { \
if ((rbstack).ssp && !--(rbstack).ssp) { \
DO_IF_RB_STATS ((rbstack).slice->depth--); \
if ((rbstack).slice->up) \
rbstack_low_pop (&(rbstack)); \
} \
} while (0)
#define RBSTACK_UP(rbstack, node) do { \
if ((rbstack).ssp) { \
(node) = (rbstack).slice->stack[--(rbstack).ssp]; \
if (!(rbstack).ssp && (rbstack).slice->up) \
rbstack_low_up (&(rbstack)); \
} \
else (node) = NULL; \
} while (0)
#define RBSTACK_UP_IGNORE(rbstack) do { \
if ((rbstack).ssp && !--(rbstack).ssp && (rbstack).slice->up) \
rbstack_low_up (&(rbstack)); \
} while (0)
#define RBSTACK_PEEK(rbstack) \
((rbstack).ssp ? (rbstack).slice->stack[(rbstack).ssp - 1] : NULL)
#define RBSTACK_POKE(rbstack, node) do { \
DO_IF_DEBUG (if (!(rbstack).ssp) Pike_fatal ("Using free stack pointer.\n")); \
(rbstack).slice->stack[(rbstack).ssp - 1] = (node); \
} while (0)
#define RBSTACK_UP_TO_ROOT(rbstack, node) do { \
if ((rbstack).ssp) { \
rbstack_low_up_to_root (&(rbstack)); \
(node) = (rbstack).slice->stack[0]; \
} \
} while (0)
#define RBSTACK_FREE(rbstack) do { \
if ((rbstack).ssp) { \
if ((rbstack).slice->up) rbstack_low_free (&(rbstack)); \
(rbstack).ssp = 0; \
} \
DO_IF_RB_STATS ( \
rb_num_tracks++; \
rb_track_depth += (rbstack).slice->maxdepth; \
if ((rbstack).slice->maxdepth > rb_max_depth) \
rb_max_depth = (rbstack).slice->maxdepth; \
(rbstack).slice->depth = (rbstack).slice->maxdepth = 0; \
); \
} while (0)
#define RBSTACK_FREE_SET_ROOT(rbstack, node) do { \
if ((rbstack).ssp) { \
if ((rbstack).slice->up) rbstack_low_free (&(rbstack)); \
(rbstack).ssp = 0; \
(node) = (rbstack).slice->stack[0]; \
} \
DO_IF_RB_STATS ( \
rb_num_tracks++; \
rb_track_depth += (rbstack).slice->maxdepth; \
if ((rbstack).slice->maxdepth > rb_max_depth) \
rb_max_depth = (rbstack).slice->maxdepth; \
(rbstack).slice->depth = (rbstack).slice->maxdepth = 0; \
); \
} while (0)
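
/* Low-level linking and unlinking primitives, presumably implemented
 * in rbtree.c. The rbstack passed to them is expected to describe the
 * path from the root down to the position being operated on;
 * low_rb_build_stack seems to exist to reconstruct such a path for an
 * already linked node. */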
void low_rb_init_root (struct rb_node_hdr *new_root);
void low_rb_link_at_prev (struct rb_node_hdr **root, struct rbstack_ptr rbstack,
struct rb_node_hdr *new_node);
void low_rb_link_at_next (struct rb_node_hdr **root, struct rbstack_ptr rbstack,
struct rb_node_hdr *new_node);
struct rb_node_hdr *low_rb_unlink_with_move (struct rb_node_hdr **root,
struct rbstack_ptr *rbstack_ptr,
int keep_rbstack,
size_t node_size);
void low_rb_unlink_without_move (struct rb_node_hdr **root,
struct rbstack_ptr *rbstack_ptr,
int keep_rbstack);
void low_rb_build_stack (struct rb_node_hdr *root, struct rb_node_hdr *node,
struct rbstack_ptr *rbstack_ptr);

#if defined (PIKE_DEBUG) || defined (TEST_MULTISET)
typedef void dump_data_fn (struct rb_node_hdr *node, void *extra);
void debug_dump_rb_tree (struct rb_node_hdr *root, dump_data_fn *dump_data, void *extra);
void debug_dump_rbstack (struct rbstack_ptr rbstack, struct rbstack_ptr *pos);
void debug_check_rb_tree (struct rb_node_hdr *root, dump_data_fn *dump_data, void *extra);
void debug_check_rbstack (struct rb_node_hdr *root, struct rbstack_ptr rbstack);
#endif
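
/* LOW_RB_TRAVERSE below expands to an inorder traversal of a threaded
 * tree. node should be the root on entry and rbstack an initialized,
 * empty stack. The code block arguments are spliced in at the
 * corresponding points of the traversal: push when a node is entered,
 * p_leaf or p_sub depending on whether the prev pointer is a thread
 * or a subtree, between after the prev side is done, n_leaf and n_sub
 * likewise for the next side, and pop when the node is left. */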
#define LOW_RB_TRAVERSE(label, rbstack, node, push, p_leaf, p_sub, \
between, n_leaf, n_sub, pop) \
do { \
DO_IF_RB_STATS (rb_num_traverses++); \
if (node) { \
PIKE_CONCAT (enter_, label): \
DO_IF_RB_STATS (rb_num_traverse_ops++); \
{push;} \
if ((node)->flags & RB_THREAD_PREV) \
{p_leaf;} \
else { \
{p_sub;} \
RBSTACK_PUSH (rbstack, node); \
(node) = (node)->prev; \
goto PIKE_CONCAT (enter_, label); \
} \
PIKE_CONCAT (between_, label): \
{between;} \
if ((node)->flags & RB_THREAD_NEXT) \
{n_leaf;} \
else { \
{n_sub;} \
RBSTACK_PUSH (rbstack, node); \
(node) = (node)->next; \
goto PIKE_CONCAT (enter_, label); \
} \
while (1) { \
PIKE_CONCAT (leave_, label): \
DO_IF_RB_STATS (rb_num_traverse_ops++); \
{pop;} \
{ \
struct rb_node_hdr *rb_last_ = (node); \
RBSTACK_POP (rbstack, node); \
if (!(node)) break; \
/* Compare with next and not prev to avoid an infinite */ \
/* loop if a node (incorrectly) got prev == next. */ \
if (rb_last_ != (node)->next) \
goto PIKE_CONCAT (between_, label); \
} \
} \
} \
} while (0)
#define LOW_RB_DEBUG_TRAVERSE(label, rbstack, node, push, p_leaf, p_sub, \
between, n_leaf, n_sub, pop) \
do { \
size_t PIKE_CONCAT (depth_, label) = 0; \
LOW_RB_TRAVERSE( \
label, rbstack, node, \
fprintf (stderr, "%*s%p enter\n", \
PIKE_CONCAT (depth_, label)++, "", node); {push;}, \
fprintf (stderr, "%*s%p prev leaf\n", \
PIKE_CONCAT (depth_, label), "", node); {p_leaf;}, \
fprintf (stderr, "%*s%p prev subtree\n", \
PIKE_CONCAT (depth_, label), "", node); {p_sub;}, \
fprintf (stderr, "%*s%p between\n", \
PIKE_CONCAT (depth_, label) - 1, "", node); {between;}, \
fprintf (stderr, "%*s%p next leaf\n", \
PIKE_CONCAT (depth_, label), "", node); {n_leaf;}, \
fprintf (stderr, "%*s%p next subtree\n", \
PIKE_CONCAT (depth_, label), "", node); {n_sub;}, \
fprintf (stderr, "%*s%p leave\n", \
--PIKE_CONCAT (depth_, label), "", node); {pop;}); \
} while (0)
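
/* LOW_RB_FIND searches the tree below (node) for a key. The cmp block
 * must set cmp_res to the comparison of the key against (node):
 * negative to descend towards prev, positive to descend towards next,
 * zero on a match. When the loop stops, exactly one of got_lt (closest
 * node is less than the key), got_eq or got_gt (closest node is
 * greater) is expanded with (node) pointing at that node.
 *
 * A minimal sketch, assuming a hypothetical node layout
 * struct my_node {struct rb_node_hdr h; int key;} and a search key k:
 *
 *   struct rb_node_hdr *node = root;
 *   int k_found = 0;
 *   LOW_RB_FIND (node,
 *     {
 *       int nk = ((struct my_node *) node)->key;
 *       cmp_res = k < nk ? -1 : (k > nk ? 1 : 0);
 *     },
 *     {node = NULL;},        (got_lt: closest node is less)
 *     {k_found = 1;},        (got_eq: node is the match)
 *     {node = NULL;});       (got_gt: closest node is greater)
 */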
#define LOW_RB_FIND(node, cmp, got_lt, got_eq, got_gt) \
do { \
int cmp_res, found_eq_ = 0; \
DO_IF_RB_STATS ( \
size_t stat_depth_count_ = 0; \
rb_num_finds++; \
); \
while (1) { \
DO_IF_DEBUG (if (!node) Pike_fatal ("Recursing into null node.\n")); \
DO_IF_RB_STATS ( \
if (++stat_depth_count_ > rb_max_depth) \
rb_max_depth = stat_depth_count_; \
rb_find_depth++; \
); \
{cmp;} \
if (cmp_res < 0) { \
if ((node)->flags & RB_THREAD_PREV) { \
if (found_eq_) { \
(node) = (node)->prev; \
} else { \
{got_gt;} \
break; \
} \
} else { \
(node) = (node)->prev; \
continue; \
} \
} else { \
if ((node)->flags & RB_THREAD_NEXT) { \
if (cmp_res) { \
{got_lt;} \
break; \
} \
} else { \
if (!cmp_res) found_eq_ = 1; \
(node) = (node)->next; \
continue; \
} \
} \
{got_eq;} \
break; \
} \
} while (0)
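
/* LOW_RB_FIND_NEQ is the same search loop for keys that never compare
 * equal to any node, so there is no got_eq case; with PIKE_DEBUG a
 * cmp_res of zero is treated as a fatal error. */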
#define LOW_RB_FIND_NEQ(node, cmp, got_lt, got_gt) \
do { \
int cmp_res; \
DO_IF_RB_STATS ( \
size_t stat_depth_count_ = 0; \
rb_num_finds++; \
); \
while (1) { \
DO_IF_DEBUG (if (!node) Pike_fatal ("Recursing into null node.\n")); \
DO_IF_RB_STATS ( \
if (++stat_depth_count_ > rb_max_depth) \
rb_max_depth = stat_depth_count_; \
rb_find_depth++; \
); \
{cmp;} \
if (cmp_res < 0) { \
if ((node)->flags & RB_THREAD_PREV) { \
{got_gt;} \
break; \
} \
(node) = (node)->prev; \
} else { \
DO_IF_DEBUG (if (!cmp_res) Pike_fatal ("cmp_res 0 not expected.\n")); \
if ((node)->flags & RB_THREAD_NEXT) { \
{got_lt;} \
break; \
} \
(node) = (node)->next; \
} \
} \
} while (0)
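
/* LOW_RB_TRACK and LOW_RB_TRACK_NEQ do the same searches as
 * LOW_RB_FIND and LOW_RB_FIND_NEQ, but additionally record the path to
 * the found node on rbstack, which must be empty on entry. */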
#define LOW_RB_TRACK(rbstack, node, cmp, got_lt, got_eq, got_gt) \
do { \
DO_IF_DEBUG ( \
if (RBSTACK_PEEK (rbstack)) Pike_fatal ("The stack is not empty.\n"); \
); \
DO_IF_RB_STATS (rb_num_finds--); \
LOW_RB_FIND ( \
node, \
{ \
DO_IF_RB_STATS (rb_find_depth--); \
RBSTACK_PUSH (rbstack, node); \
{cmp;} \
}, \
got_lt, \
{ \
while ((node) != RBSTACK_PEEK (rbstack)) \
RBSTACK_POP_IGNORE (rbstack); \
{got_eq;} \
}, got_gt); \
} while (0)
#define LOW_RB_TRACK_NEQ(rbstack, node, cmp, got_lt, got_gt) \
do { \
DO_IF_DEBUG ( \
if (RBSTACK_PEEK (rbstack)) Pike_fatal ("The stack is not empty.\n"); \
); \
DO_IF_RB_STATS (rb_num_finds--); \
LOW_RB_FIND_NEQ ( \
node, \
{ \
DO_IF_RB_STATS (rb_find_depth--); \
RBSTACK_PUSH (rbstack, node); \
{cmp;} \
}, \
got_lt, got_gt); \
} while (0)
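
/* LOW_RB_TRACK_FIRST pushes the path from (node), assumed to be the
 * root, down to the leftmost (first) node, and leaves that node in
 * (node). */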
#define LOW_RB_TRACK_FIRST(rbstack, node) \
do { \
DO_IF_DEBUG ( \
if (RBSTACK_PEEK (rbstack)) Pike_fatal ("The stack is not empty.\n"); \
); \
DO_IF_RB_STATS (rb_num_sidetracks++); \
if (node) { \
struct rb_node_hdr *rb_prev_ = node->prev; \
RBSTACK_PUSH (rbstack, node); \
DO_IF_RB_STATS (rb_num_sidetrack_ops++); \
while (rb_prev_) { \
RBSTACK_PUSH (rbstack, node = rb_prev_); \
DO_IF_RB_STATS (rb_num_sidetrack_ops++); \
rb_prev_ = node->prev; \
} \
} \
} while (0)
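
/* LOW_RB_TRACK_NEXT and LOW_RB_TRACK_PREV step (node) to its inorder
 * successor respectively predecessor while keeping rbstack in sync.
 * (node) must be on top of the stack on entry; afterwards the new node
 * is on top, or (node) is NULL and the stack is empty if there is no
 * successor/predecessor. */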
#define LOW_RB_TRACK_NEXT(rbstack, node) \
do { \
DO_IF_DEBUG ( \
if (node != RBSTACK_PEEK (rbstack)) \
Pike_fatal ("Given node is not on top of stack.\n"); \
); \
DO_IF_RB_STATS (rb_num_sidetracks++); \
if (node->flags & RB_THREAD_NEXT) { \
struct rb_node_hdr *rb_next_ = node->next; \
while ((node = RBSTACK_PEEK (rbstack)) != rb_next_) { \
RBSTACK_POP_IGNORE (rbstack); \
DO_IF_RB_STATS (rb_num_sidetrack_ops++); \
} \
} else { \
node = node->next; \
while (1) { \
RBSTACK_PUSH (rbstack, node); \
DO_IF_RB_STATS (rb_num_sidetrack_ops++); \
if (node->flags & RB_THREAD_PREV) break; \
node = node->prev; \
} \
} \
} while (0)
#define LOW_RB_TRACK_PREV(rbstack, node) \
do { \
DO_IF_DEBUG ( \
if (node != RBSTACK_PEEK (rbstack)) \
Pike_fatal ("Given node is not on top of stack.\n"); \
); \
DO_IF_RB_STATS (rb_num_sidetracks++); \
if (node->flags & RB_THREAD_PREV) { \
struct rb_node_hdr *rb_prev_ = node->prev; \
while ((node = RBSTACK_PEEK (rbstack)) != rb_prev_) { \
RBSTACK_POP_IGNORE (rbstack); \
DO_IF_RB_STATS (rb_num_sidetrack_ops++); \
} \
} else { \
node = node->prev; \
while (1) { \
RBSTACK_PUSH (rbstack, node); \
DO_IF_RB_STATS (rb_num_sidetrack_ops++); \
if (node->flags & RB_THREAD_NEXT) break; \
node = node->next; \
} \
} \
} while (0)
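
/* LOW_RB_INSERT inserts a node into *(tree). cmp compares the key
 * being inserted against (node), just as in LOW_RB_FIND. If an equal
 * node is found, replace is expanded with (node) pointing at it and
 * nothing is linked in; otherwise insert is expanded and must leave
 * the new node in (node), which is then linked in at the tracked
 * position.
 *
 * A minimal sketch, assuming the same hypothetical struct my_node as
 * above and a preallocated struct my_node *new_node:
 *
 *   struct rb_node_hdr *node;
 *   LOW_RB_INSERT (&root, node,
 *     {
 *       int nk = ((struct my_node *) node)->key;
 *       cmp_res = new_node->key < nk ? -1 : (new_node->key > nk ? 1 : 0);
 *     },
 *     {node = &new_node->h;},                                (insert)
 *     {((struct my_node *) node)->key = new_node->key;});    (replace)
 */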
#define LOW_RB_INSERT(tree, node, cmp, insert, replace) \
do { \
int rb_ins_type_; \
RBSTACK_INIT (rbstack); \
if (((node) = *(tree))) { \
LOW_RB_TRACK ( \
rbstack, node, cmp, \
{ \
rb_ins_type_ = 1; /* Got less - link at next. */ \
}, { \
rb_ins_type_ = 0; /* Got equal - replace. */ \
{replace;} \
RBSTACK_FREE (rbstack); \
}, { \
rb_ins_type_ = 2; /* Got greater - link at prev. */ \
}); \
} \
else rb_ins_type_ = 3; \
if (rb_ins_type_) { \
DO_IF_DEBUG ((node) = 0); \
{insert;} \
switch (rb_ins_type_) { \
case 1: low_rb_link_at_next ((tree), rbstack, (node)); break; \
case 2: low_rb_link_at_prev ((tree), rbstack, (node)); break; \
case 3: low_rb_init_root (*(tree) = (node)); break; \
} \
} \
} while (0)
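
/* LOW_RB_MERGE combines the nodes of the trees a and b into res, a
 * list linked through ->next, according to the PIKE_ARRAY_OP_* code in
 * operation. cmp compares the current a and b nodes and sets cmp_res;
 * copy_a/copy_b must leave a new node in new_node, and free_a/free_b
 * dispose of the current node. length is incremented once for every
 * node added to res, and prep_a/prep_b run before the comparison
 * whenever the respective node exists. */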
#define LOW_RB_MERGE(label, a, b, res, length, operation, \
prep_a, prep_b, cmp, copy_a, free_a, copy_b, free_b) \
do { \
struct rb_node_hdr *new_node; \
int cmp_res, op_ = 0; /* Init only to avoid warnings. */ \
/* Traverse backwards so that the merge "gravitates" towards the */ \
/* end when duplicate entries are processed, e.g. */ \
/* (<1:1, 1:2>) | (<1:3>) produces (<1:1, 1:3>) and not */ \
/* (<1:3, 1:2>). */ \
\
a = rb_last (a); \
b = rb_last (b); \
res = 0; \
\
while (1) { \
/* A bit quirky code to avoid expanding the code blocks more */ \
/* than once. */ \
if (a) {prep_a;} \
if (b) { \
{prep_b;} \
if (a) { \
{cmp;} \
/* Result reversed due to backward direction. */ \
if (cmp_res > 0) \
op_ = operation >> 8; \
else if (cmp_res < 0) \
op_ = operation; \
else \
op_ = operation >> 4; \
} \
else if (operation & PIKE_ARRAY_OP_B) \
goto PIKE_CONCAT (label, _copy_b); \
else \
goto PIKE_CONCAT (label, _free_b); \
} \
else if (a) { \
if (operation & (PIKE_ARRAY_OP_A << 8)) \
goto PIKE_CONCAT (label, _copy_a); \
else \
goto PIKE_CONCAT (label, _free_a); \
} else \
break; \
\
if (op_ & PIKE_ARRAY_OP_B) { \
PIKE_CONCAT (label, _copy_b):; \
{copy_b;} \
new_node->next = res, res = new_node; \
length++; \
b = rb_prev (b); \
} \
else if (op_ & PIKE_ARRAY_OP_SKIP_B) { \
PIKE_CONCAT (label, _free_b):; \
new_node = rb_prev (b); \
{free_b;} \
b = new_node; \
} \
\
if (a) { \
if (op_ & PIKE_ARRAY_OP_A) { \
if (!(op_ & PIKE_ARRAY_OP_B)) { \
PIKE_CONCAT (label, _copy_a):; \
{copy_a;} \
new_node->next = res, res = new_node; \
length++; \
a = rb_prev (a); \
} \
} \
else if (op_ & PIKE_ARRAY_OP_SKIP_A) { \
PIKE_CONCAT (label, _free_a):; \
new_node = rb_prev (a); \
{free_a;} \
a = new_node; \
} \
} \
} \
} while (0)
#endif /* RBTREE_LOW_H */