pike.git/src/rbtree.c

pike.git/src/rbtree.c:1:   /* An implementation of a threaded red/black balanced binary tree.    * -  * Created 2001-04-27 by Martin Stjernholm -  * -  * $Id: rbtree.c,v 1.8 2001/06/10 14:03:47 grubba Exp $ +  * Created 2001-04-27 by Martin Stjernholm <mast@lysator.liu.se>    */      #include "global.h" - #include "constants.h" - #include "builtin_functions.h" +  + RCSID("$Id: rbtree.c,v 1.9 2001/12/10 00:57:17 mast Exp $"); +    #include "interpret.h" - #include "mapping.h" +    #include "pike_error.h" - #include "svalue.h" - #include "rbtree.h" - #include "block_alloc.h" + #include "rbtree_low.h"    - /* #define TEST_RBTREE */ + #include <assert.h> + #include <stdlib.h>    - #if defined (PIKE_DEBUG) || defined (TEST_RBTREE) + #if defined (PIKE_DEBUG) || defined (TEST_MULTISET)    - typedef void dump_data_fn (struct rb_node_hdr *node); - static void debug_dump_ind_data (struct rb_node_ind *node); - static void debug_dump_rb_tree (struct rb_node_hdr *tree, dump_data_fn *dump_data); + DECLSPEC(noreturn) static void debug_rb_fatal ( +  struct rb_node_hdr *tree, +  const char *fmt, ...) ATTRIBUTE((noreturn, format (printf, 2, 3))); + #define rb_fatal \ +  (fprintf (stderr, "%s:%d: Fatal in rbtree: ", __FILE__, __LINE__), \ +  debug_rb_fatal) + DECLSPEC(noreturn) static void debug_custom_rb_fatal ( +  struct rb_node_hdr *tree, dump_data_fn *dump_data, +  void *extra, const char *fmt, ...) ATTRIBUTE((noreturn, format (printf, 4, 5))); + #define custom_rb_fatal \ +  (fprintf (stderr, "%s:%d: Fatal in rbtree: ", __FILE__, __LINE__), \ +  debug_custom_rb_fatal)    - DECLSPEC(noreturn) static void debug_rb_fatal (struct rb_node_hdr *tree, const char *fmt, ...) ATTRIBUTE((noreturn, format (printf, 2, 3))); - DECLSPEC(noreturn) static void debug_rb_ind_fatal (struct rb_node_ind *tree, const char *fmt, ...) ATTRIBUTE((noreturn, format (printf, 2, 3))); - DECLSPEC(noreturn) static void debug_rb_indval_fatal (struct rb_node_indval *tree, const char *fmt, ...) ATTRIBUTE((noreturn, format (printf, 2, 3))); - #define rb_fatal (fprintf (stderr, "%s:%d: Fatal in rbtree: ", \ -  __FILE__, __LINE__), debug_rb_fatal) - #define rb_ind_fatal (fprintf (stderr, "%s:%d: Fatal in ind rbtree: ", \ -  __FILE__, __LINE__), debug_rb_ind_fatal) - #define rb_indval_fatal (fprintf (stderr, "%s:%d: Fatal in indval rbtree: ", \ -  __FILE__, __LINE__), debug_rb_indval_fatal) -  +    #endif    - #define HDR(node) ((struct rb_node_hdr *) (node)) - #define IND(node) ((struct rb_node_ind *) (node)) - #define INDVAL(node) ((struct rb_node_indval *) (node)) -  - /* The default compare function for ind and indval. 
*/ - PMOD_EXPORT int rb_ind_default_cmp (struct svalue *key, union rb_node *node) + void rbstack_push (struct rbstack_ptr *rbstack, struct rb_node_hdr *node)   { -  struct svalue tmp; -  return set_svalue_cmpfun (key, use_rb_node_ind (&(node->i), tmp)); +  struct rbstack_slice *new = ALLOC_STRUCT (rbstack_slice); +  new->up = rbstack->slice; +  new->stack[0] = node; + #ifdef RB_STATS +  rbstack_slice_allocs++; +  new->depth = rbstack->slice->depth; +  new->maxdepth = rbstack->slice->maxdepth; + #endif +  rbstack->slice = new; +  rbstack->ssp = 1;   }    - struct svalue_cmp_data + void rbstack_pop (struct rbstack_ptr *rbstack)   { -  struct svalue *key, *cmp_less; - }; -  - static int svalue_cmp_eq (struct svalue_cmp_data *data, union rb_node *node) - { -  int cmp_res; -  struct svalue tmp; -  push_svalue (data->key); -  push_svalue (use_rb_node_ind (&(node->i), tmp)); -  apply_svalue (data->cmp_less, 2); -  if (IS_ZERO (sp - 1)) -  cmp_res = !is_eq (data->key, &tmp); -  else -  cmp_res = -1; -  pop_stack(); -  return cmp_res; +  struct rbstack_slice *old = rbstack->slice; +  rbstack->slice = old->up; + #ifdef RB_STATS +  rbstack->slice->maxdepth = old->maxdepth; + #endif +  xfree (old); +  rbstack->ssp = STACK_SLICE_SIZE;   }    - /* Considers equal as greater. */ - static int svalue_cmp_ge (struct svalue_cmp_data *data, union rb_node *node) + void rbstack_up (struct rbstack_ptr *rbstack)   { -  int cmp_res; -  struct svalue tmp; -  push_svalue (data->key); -  push_svalue (use_rb_node_ind (&(node->i), tmp)); -  apply_svalue (data->cmp_less, 2); -  cmp_res = IS_ZERO (sp - 1) ? 1 : -1; -  pop_stack(); -  return cmp_res; +  rbstack->slice = rbstack->slice->up; +  rbstack->ssp = STACK_SLICE_SIZE;   }    - /* Considers equal as less. */ - static int svalue_cmp_le (struct svalue_cmp_data *data, union rb_node *node) + void rbstack_up_to_root (struct rbstack_ptr *rbstack)   { -  int cmp_res; -  struct svalue tmp; -  push_svalue (use_rb_node_ind (&(node->i), tmp)); -  push_svalue (data->key); -  apply_svalue (data->cmp_less, 2); -  cmp_res = IS_ZERO (sp - 1) ? -1 : 1; -  pop_stack(); -  return cmp_res; +  struct rbstack_slice *up; +  while ((up = rbstack->slice->up)) rbstack->slice = up; +  rbstack->ssp = 0;   }    - /* Functions for handling nodes with index only. */ -  - BLOCK_ALLOC (rb_node_ind, 1024) -  - PMOD_EXPORT struct rb_node_ind *rb_ind_insert (struct rb_node_ind **tree, -  struct svalue *ind, -  struct svalue *cmp_less) + void rbstack_free (struct rbstack_ptr *rbstack)   { -  struct rb_node_hdr *node; -  struct svalue tmp; -  -  if (cmp_less) { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = ind; -  LOW_RB_INSERT ((struct rb_node_hdr **) tree, node, { /* Compare. */ -  cmp_res = svalue_cmp_eq (&data, (union rb_node *) node); -  }, { /* Insert. */ -  struct rb_node_ind *new = alloc_rb_node_ind(); -  node = HDR (new); -  assign_svalue_no_free (&new->ind, ind); -  DO_IF_DEBUG (new->ind.type |= RB_FLAG_MARKER); -  }, { /* Replace. */ -  node = 0; -  }); +  struct rbstack_slice *ptr = rbstack->slice; +  do { +  struct rbstack_slice *old = ptr; +  ptr = ptr->up; + #ifdef RB_STATS +  ptr->maxdepth = old->maxdepth; + #endif +  xfree (old); +  } while (ptr->up); +  rbstack->slice = ptr; +  rbstack->ssp = 0;   }    -  else { -  LOW_RB_INSERT ((struct rb_node_hdr **) tree, node, { /* Compare. */ -  cmp_res = set_svalue_cmpfun (ind, use_rb_node_ind (IND (node), tmp)); -  }, { /* Insert. 
*/ -  struct rb_node_ind *new = alloc_rb_node_ind(); -  node = HDR (new); -  assign_svalue_no_free (&new->ind, ind); -  DO_IF_DEBUG (new->ind.type |= RB_FLAG_MARKER); -  }, { /* Replace. */ -  node = 0; -  }); -  } -  -  return IND (node); - } -  - PMOD_EXPORT struct rb_node_ind *rb_ind_add (struct rb_node_ind **tree, -  struct svalue *ind, -  struct svalue *cmp_less) + /* Inserts the given node at *pos and advances *pos. *top is the top +  * of the stack, which also is advanced to keep track of the top. */ + void rbstack_insert (struct rbstack_ptr *top, struct rbstack_ptr *pos, +  struct rb_node_hdr *node)   { -  struct rb_node_ind *new = alloc_rb_node_ind(); -  assign_svalue_no_free (&new->ind, ind); +  struct rbstack_ptr rbp1 = *top, rbp2 = *top, rbpos = *pos; +  RBSTACK_PUSH (rbp2, NULL); +  *top = rbp2; +  +  while (rbp1.ssp != rbpos.ssp || rbp1.slice != rbpos.slice) { +  rbp2.slice->stack[--rbp2.ssp] = rbp1.slice->stack[--rbp1.ssp]; +  if (!rbp2.ssp) rbp2.slice = rbp1.slice, rbp2.ssp = STACK_SLICE_SIZE; +  if (!rbp1.ssp) { +  if (rbp1.slice->up) rbstack_up (&rbp1);   #ifdef PIKE_DEBUG -  new->ind.type |= RB_FLAG_MARKER; +  else if (rbp1.ssp != rbpos.ssp || rbp1.slice != rbpos.slice) +  fatal ("Didn't find the given position on the stack.\n");   #endif -  if (cmp_less) { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = ind; -  low_rb_add ((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) svalue_cmp_eq, &data, -  HDR (new)); +     } -  else -  low_rb_add ((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) rb_ind_default_cmp, ind, -  HDR (new)); -  return new; +     }    - PMOD_EXPORT int rb_ind_delete (struct rb_node_ind **tree, -  struct svalue *ind, -  struct svalue *cmp_less) - { -  struct rb_node_ind *old; -  if (cmp_less) { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = ind; -  old = IND (low_rb_delete ((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) svalue_cmp_eq, &data, -  sizeof (struct rb_node_ind))); +  RBSTACK_POKE (rbp2, node); +  *pos = rbp2;   } -  else -  old = IND (low_rb_delete ((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) rb_ind_default_cmp, ind, -  sizeof (struct rb_node_ind))); -  if (old) { -  struct svalue tmp; -  free_svalue (use_rb_node_ind (old, tmp)); -  really_free_rb_node_ind (old); -  return 1; -  } -  return 0; - } +     - static struct rb_node_ind *copy_ind_node (struct rb_node_ind *node) + void rbstack_assign (struct rbstack_ptr *target, struct rbstack_ptr *source)   { -  struct rb_node_ind *new = alloc_rb_node_ind(); -  struct svalue tmp; -  assign_svalue_no_free (&new->ind, use_rb_node_ind (node, tmp)); -  DO_IF_DEBUG (new->ind.type |= RB_FLAG_MARKER); -  return new; - } +  /* Not tested. */    - PMOD_EXPORT struct rb_node_ind *rb_ind_copy (struct rb_node_ind *tree) - { -  return IND (low_rb_copy (HDR (tree), (low_rb_copy_fn *) copy_ind_node)); - } +  struct rbstack_slice *src_slice = source->slice; +  struct rbstack_slice *tgt_slice = target->slice;    - PMOD_EXPORT void rb_ind_free (struct rb_node_ind *tree) - { -  struct rb_node_hdr *node = HDR (tree); -  RBSTACK_INIT (rbstack); -  LOW_RB_TRAVERSE (1, rbstack, node, ;, ;, ;, ;, ;, ;, { -  /* Pop. */ -  IND (node)->ind.type &= ~RB_IND_FLAG_MASK; -  free_svalue (&IND (node)->ind); -  really_free_rb_node_ind (IND (node)); -  }); - } -  - /* Functions for handling nodes with index and value. 
*/ -  - BLOCK_ALLOC (rb_node_indval, 1024) -  - PMOD_EXPORT struct rb_node_indval *rb_indval_insert (struct rb_node_indval **tree, -  struct svalue *ind, -  struct svalue *val, -  struct svalue *cmp_less) - { -  struct rb_node_hdr *node; -  struct svalue tmp; -  -  if (cmp_less) { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = ind; -  LOW_RB_INSERT ((struct rb_node_hdr **) tree, node, { /* Compare. */ -  cmp_res = svalue_cmp_eq (&data, (union rb_node *) node); -  }, { /* Insert. */ -  struct rb_node_indval *new = alloc_rb_node_indval(); -  node = HDR (new); -  assign_svalue_no_free (&new->ind, ind); -  assign_svalue_no_free (&new->val, val); -  DO_IF_DEBUG (new->ind.type |= RB_FLAG_MARKER); -  }, { /* Replace. */ -  node = 0; -  }); -  } -  -  else { -  LOW_RB_INSERT ((struct rb_node_hdr **) tree, node, { /* Compare. */ -  cmp_res = set_svalue_cmpfun (ind, use_rb_node_ind (INDVAL (node), tmp)); -  }, { /* Insert. */ -  struct rb_node_indval *new = alloc_rb_node_indval(); -  node = HDR (new); -  assign_svalue_no_free (&new->ind, ind); -  assign_svalue_no_free (&new->val, val); -  DO_IF_DEBUG (new->ind.type |= RB_FLAG_MARKER); -  }, { /* Replace. */ -  node = 0; -  }); -  } -  -  return INDVAL (node); - } -  - PMOD_EXPORT struct rb_node_indval *rb_indval_add (struct rb_node_indval **tree, -  struct svalue *ind, -  struct svalue *val, -  struct svalue *cmp_less) - { -  struct rb_node_indval *new = alloc_rb_node_indval(); -  assign_svalue_no_free (&new->ind, ind); -  assign_svalue_no_free (&new->val, val); +    #ifdef PIKE_DEBUG -  new->ind.type |= RB_FLAG_MARKER; +  if (target->ssp) fatal ("target rbstack not empty.\n");   #endif -  if (cmp_less) { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = ind; -  low_rb_add ((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) svalue_cmp_eq, &data, -  HDR (new)); -  } -  else -  low_rb_add ((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) rb_ind_default_cmp, ind, -  HDR (new)); -  return new; - } +     - PMOD_EXPORT struct rb_node_indval *rb_indval_add_after (struct rb_node_indval **tree, -  struct rb_node_indval *node, -  struct svalue *ind, -  struct svalue *val, -  struct svalue *cmp_less) - { -  struct rb_node_indval *new = alloc_rb_node_indval(); -  assign_svalue_no_free (&new->ind, ind); -  assign_svalue_no_free (&new->val, val); +  target->ssp = source->ssp; +  source->ssp = 0;    - #ifdef PIKE_DEBUG -  new->ind.type |= RB_FLAG_MARKER; -  -  if (*tree) { /* Check that it won't break the order. 
*/ -  struct rb_node_indval *tmpnode; -  struct svalue tmp; -  int cmp1, cmp2; -  - #define DO_CMP(a, b, cmp) \ -  do { \ -  if (cmp_less) { \ -  push_svalue (a); \ -  push_svalue (b); \ -  apply_svalue (cmp_less, 2); \ -  if (IS_ZERO (sp - 1)) \ -  cmp = !is_eq (a, b); \ -  else \ -  cmp = -1; \ -  pop_stack(); \ -  } \ -  else cmp = set_svalue_cmpfun (a, b); \ -  } while (0) -  -  if (node) { -  use_rb_node_ind (node, tmp); -  DO_CMP (ind, &tmp, cmp1); -  tmpnode = rb_indval_next (node); -  if (tmpnode) { -  use_rb_node_ind (tmpnode, tmp); -  DO_CMP (ind, &tmp, cmp2); +  if (src_slice->up) { +  struct rbstack_slice *prev_slice; +  target->slice = src_slice; +  do { +  prev_slice = src_slice; +  src_slice = src_slice->up; +  } while (src_slice->up); +  MEMCPY ((char *) &tgt_slice->stack, (char *) &src_slice->stack, +  STACK_SLICE_SIZE * sizeof (struct rb_node_hdr *)); +  prev_slice->up = tgt_slice; +  source->slice = src_slice;    } -  else cmp2 = -1; -  if (cmp1 < 0 || cmp2 > 0) -  fatal ("Adding at this position would break the order.\n"); -  } +     else { -  use_rb_node_ind (rb_indval_first (*tree), tmp); -  DO_CMP (ind, &tmp, cmp1); -  if (cmp1 > 0) fatal ("Adding at beginning would break the order.\n"); -  } - #undef DO_CMP -  } +  MEMCPY ((char *) &tgt_slice->stack, (char *) &src_slice->stack, +  target->ssp * sizeof (struct rb_node_hdr *)); + #ifdef RB_STATS +  tgt_slice->maxdepth = src_slice->maxdepth; +  tgt_slice->depth = src_slice->depth;   #endif -  -  if (cmp_less) { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = ind; -  low_rb_add_after ((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) svalue_cmp_eq, &data, -  HDR (new), HDR (node)); +     } -  else -  low_rb_add_after ((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) rb_ind_default_cmp, ind, -  HDR (new), HDR (node)); -  return new; +    }    - PMOD_EXPORT int rb_indval_delete (struct rb_node_indval **tree, -  struct svalue *ind, -  struct svalue *cmp_less) + void rbstack_copy (struct rbstack_ptr *target, struct rbstack_ptr *source)   { -  struct rb_node_indval *old; -  if (cmp_less) { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = ind; -  old = INDVAL(low_rb_delete((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) svalue_cmp_eq, &data, -  sizeof (struct rb_node_indval))); -  } -  else -  old = INDVAL(low_rb_delete((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) rb_ind_default_cmp, ind, -  sizeof (struct rb_node_indval))); -  if (old) { -  struct svalue tmp; -  free_svalue (use_rb_node_ind (old, tmp)); -  free_svalue (&old->val); -  really_free_rb_node_indval (old); -  return 1; -  } -  return 0; - } +  /* Not tested. */    - /* Returns the pointer to the node that actually was freed. 
*/ - PMOD_EXPORT struct rb_node_indval *rb_indval_delete_node (struct rb_node_indval **tree, -  struct rb_node_indval *node, -  struct svalue *cmp_less) - { -  struct svalue tmp; -  struct rb_node_indval *old; -  use_rb_node_ind (node, tmp); -  if (cmp_less) { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = &tmp; -  old = INDVAL(low_rb_delete_node((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) svalue_cmp_eq, &data, -  HDR(node), sizeof(struct rb_node_indval))); -  } -  else -  old = INDVAL(low_rb_delete_node((struct rb_node_hdr **) tree, -  (low_rb_cmp_fn *) rb_ind_default_cmp, &tmp, -  HDR(node), sizeof(struct rb_node_indval))); -  free_svalue (use_rb_node_ind (old, tmp)); -  free_svalue (&old->val); -  really_free_rb_node_indval (old); -  return old; - } +  struct rbstack_slice *src_slice = source->slice; +  struct rbstack_slice *tgt_stack_slice = target->slice; +  size_t ssp = source->ssp;    - static struct rb_node_indval *copy_indval_node (struct rb_node_indval *node) - { -  struct rb_node_indval *new = alloc_rb_node_indval(); -  struct svalue tmp; -  assign_svalue_no_free (&new->ind, use_rb_node_ind (node, tmp)); -  assign_svalue_no_free (&new->val, &node->val); -  DO_IF_DEBUG (new->ind.type |= RB_FLAG_MARKER); -  return new; - } + #ifdef PIKE_DEBUG +  if (target->ssp) fatal ("target rbstack not empty.\n"); + #endif    - PMOD_EXPORT struct rb_node_indval *rb_indval_copy (struct rb_node_indval *tree) - { -  return INDVAL (low_rb_copy (HDR (tree), (low_rb_copy_fn *) copy_indval_node)); - } +  target->ssp = ssp;    - PMOD_EXPORT void rb_indval_free (struct rb_node_indval *tree) - { -  struct rb_node_hdr *node = HDR (tree); -  RBSTACK_INIT (rbstack); -  LOW_RB_TRAVERSE (1, rbstack, node, ;, ;, ;, ;, ;, ;, { -  /* Pop. */ -  INDVAL (node)->ind.type &= ~RB_IND_FLAG_MASK; -  free_svalue (&INDVAL (node)->ind); -  free_svalue (&INDVAL (node)->val); -  really_free_rb_node_indval (INDVAL (node)); -  }); +  if (src_slice->up) { +  struct rbstack_slice *tgt_slice = +  target->slice = ALLOC_STRUCT (rbstack_slice); + #ifdef RB_STATS +  rbstack_slice_allocs++; + #endif +  MEMCPY ((char *) &tgt_slice->stack, (char *) &src_slice->stack, +  ssp * sizeof (struct rb_node_hdr *)); +  ssp = STACK_SLICE_SIZE; +  while ((src_slice = src_slice->up)->up) { +  tgt_slice->up = ALLOC_STRUCT (rbstack_slice); +  tgt_slice = tgt_slice->up; + #ifdef RB_STATS +  rbstack_slice_allocs++; + #endif +  MEMCPY ((char *) &tgt_slice->stack, (char *) &src_slice->stack, +  STACK_SLICE_SIZE * sizeof (struct rb_node_hdr *));    } -  - /* Functions for handling both types of nodes. 
*/ -  - PMOD_EXPORT union rb_node *rb_find_eq_extcmp (union rb_node *tree, struct svalue *key, -  struct svalue *cmp_less) - { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = key; -  return (union rb_node *) -  low_rb_find_eq (HDR (tree), (low_rb_cmp_fn *) svalue_cmp_eq, &data); +  tgt_slice->up = tgt_stack_slice;    }    - PMOD_EXPORT union rb_node *rb_find_lt_extcmp (union rb_node *tree, struct svalue *key, -  struct svalue *cmp_less) - { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = key; -  return (union rb_node *) -  low_rb_find_lt (HDR (tree), (low_rb_cmp_fn *) svalue_cmp_le, &data); - } +  MEMCPY ((char *) &tgt_stack_slice->stack, (char *) &src_slice->stack, +  ssp * sizeof (struct rb_node_hdr *));    - PMOD_EXPORT union rb_node *rb_find_gt_extcmp (union rb_node *tree, struct svalue *key, -  struct svalue *cmp_less) - { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = key; -  return (union rb_node *) -  low_rb_find_gt (HDR (tree), (low_rb_cmp_fn *) svalue_cmp_ge, &data); + #ifdef RB_STATS +  target->slice->maxdepth = target->slice->depth = source->slice->depth; + #endif   }    - PMOD_EXPORT union rb_node *rb_find_le_extcmp (union rb_node *tree, struct svalue *key, -  struct svalue *cmp_less) + /* Offsets all the rb_node_hdr pointers in rbstack from their offsets +  * relative oldbase to the same relative newbase. Useful if the memory +  * block containing the tree has been moved. */ + void rbstack_shift (struct rbstack_ptr rbstack, +  struct rb_node_hdr *oldbase, +  struct rb_node_hdr *newbase)   { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = key; -  return (union rb_node *) -  low_rb_find_le (HDR (tree), (low_rb_cmp_fn *) svalue_cmp_ge, &data); +  struct rb_node_hdr *node; +  while ((node = RBSTACK_PEEK (rbstack))) { +  rbstack.slice->stack[rbstack.ssp - 1] = +  (struct rb_node_hdr *) ((char *) node - (char *) oldbase + (char *) newbase); +  RBSTACK_UP_IGNORE (rbstack);    } -  - PMOD_EXPORT union rb_node *rb_find_ge_extcmp (union rb_node *tree, struct svalue *key, -  struct svalue *cmp_less) - { -  struct svalue_cmp_data data; -  data.cmp_less = cmp_less; -  data.key = key; -  return (union rb_node *) -  low_rb_find_ge (HDR (tree), (low_rb_cmp_fn *) svalue_cmp_le, &data); +    }    - /* Functions for handling any type of node. */ -  +    /* Each of these step functions is O(log n), but when used to loop    * through a tree they still sum up to O(n) since every node is    * visited at most twice. 
*/    - PMOD_EXPORT struct rb_node_hdr *rb_first (struct rb_node_hdr *tree) + PMOD_EXPORT inline struct rb_node_hdr *rb_first (struct rb_node_hdr *root)   { -  if (tree) -  while (tree->prev) tree = tree->prev; -  return tree; +  DO_IF_RB_STATS (rb_num_sidesteps++; rb_num_sidestep_ops++); +  if (root) +  while (root->prev) { +  root = root->prev; +  DO_IF_RB_STATS (rb_num_sidestep_ops++);    } -  +  return root; + }    - PMOD_EXPORT struct rb_node_hdr *rb_last (struct rb_node_hdr *tree) + PMOD_EXPORT inline struct rb_node_hdr *rb_last (struct rb_node_hdr *root)   { -  if (tree) -  while (tree->next) tree = tree->next; -  return tree; +  DO_IF_RB_STATS (rb_num_sidesteps++; rb_num_sidestep_ops++); +  if (root) +  while (root->next) { +  root = root->next; +  DO_IF_RB_STATS (rb_num_sidestep_ops++);    } -  +  return root; + }    - PMOD_EXPORT struct rb_node_hdr *rb_prev (struct rb_node_hdr *node) + PMOD_EXPORT inline struct rb_node_hdr *rb_link_prev (struct rb_node_hdr *node)   { -  if (node->flags & RB_THREAD_PREV) -  return node->prev; -  else { +     node = node->prev; -  while (!(node->flags & RB_THREAD_NEXT)) +  DO_IF_RB_STATS (rb_num_sidestep_ops++); +  while (!(node->flags & RB_THREAD_NEXT)) {    node = node->next; -  +  DO_IF_RB_STATS (rb_num_sidestep_ops++); +  }    return node;   } - } +     - PMOD_EXPORT struct rb_node_hdr *rb_next (struct rb_node_hdr *node) + PMOD_EXPORT inline struct rb_node_hdr *rb_link_next (struct rb_node_hdr *node)   { -  if (node->flags & RB_THREAD_NEXT) -  return node->next; -  else { +     node = node->next; -  while (!(node->flags & RB_THREAD_PREV)) +  DO_IF_RB_STATS (rb_num_sidestep_ops++); +  while (!(node->flags & RB_THREAD_PREV)) {    node = node->prev; -  +  DO_IF_RB_STATS (rb_num_sidestep_ops++); +  }    return node;   } - } +     - /* The low level stuff. */ -  +    /* Sets the pointer in parent that points to child. */ - #define SET_PTR_TO_CHILD(parent, child, prev_val, next_val) \ - do { \ -  if (child == parent->prev) \ -  parent->prev = prev_val; \ + #define SET_PTR_TO_CHILD(PARENT, CHILD, PREV_VAL, NEXT_VAL) do { \ +  if (CHILD == PARENT->prev) \ +  PARENT->prev = PREV_VAL; \    else { \    DO_IF_DEBUG( \ -  if (child != parent->next) \ -  rb_fatal (parent, "Got invalid parent to %p " \ -  "(stack probably wrong).\n", child); \ -  ); \ -  parent->next = next_val; \ +  if (CHILD != PARENT->next) \ +  rb_fatal (PARENT, "Got invalid parent to %p " \ +  "(stack probably wrong).\n", CHILD); \ +  ); \ +  PARENT->next = NEXT_VAL; \    } \   } while (0)      /* node ret    * / \ / \    * / \ / \    * ret c ==> a node    * / \ / \    * / \ / \    * a b b c
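
The step functions above combine with rb_first()/rb_last() into the usual ordered traversal. A minimal sketch, assuming rb_next() is the thread-aware successor step still provided by rbtree.h (this file keeps calling it further down); walk_inorder() and visit_fn are hypothetical names:

static void walk_inorder (struct rb_node_hdr *root,
                          void (*visit_fn) (struct rb_node_hdr *node))
{
  struct rb_node_hdr *node;
  /* rb_first() is O(log n); the whole loop is still O(n) since every
   * node is visited at most twice, as the comment above notes. */
  for (node = rb_first (root); node; node = rb_next (node))
    visit_fn (node);
}
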
pike.git/src/rbtree.c:610:    if (!parent) rb_fatal (node, "No parent on stack.\n");   #endif    RBSTACK_POP (rbstack, grandparent);    top = grandparent ? grandparent : parent;       while (parent->flags & RB_RED) {    /* Since the root always is black we know there's a grandparent. */   #ifdef PIKE_DEBUG    if (!grandparent) rb_fatal (parent, "No parent for red node.\n");   #endif + #ifdef RB_STATS +  rb_add_rebalance_cnt++; + #endif       if (parent == grandparent->prev) {    uncle = grandparent->next;       if (!(grandparent->flags & RB_THREAD_NEXT) && uncle->flags & RB_RED) {    /* Case 1:    * grandparent(B) *grandparent(R)    * / \ / \    * / \ / \    * parent(R) uncle(R) ==> parent(B) uncle(B)
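
For context, a minimal sketch of how an insertion reaches the rebalancing code above: the caller records the search path on an rbstack and hands it to low_rb_link_at_prev()/low_rb_link_at_next() (both updated later in this diff), which link the new node in as a red leaf and call rebalance_after_add(). The int-keyed node type and int_tree_add() are hypothetical; the only assumption is that user nodes embed rb_node_hdr as their first member, which the node_size handling elsewhere in this file also relies on. Real callers normally go through the LOW_RB_TRACK/LOW_RB_INSERT macros instead.

struct int_node {
  struct rb_node_hdr h;                /* Header assumed to come first. */
  int key;
};

static void int_tree_add (struct rb_node_hdr **root, struct int_node *new)
{
  new->h.flags = 0;                    /* Start from a clean flag field. */
  if (*root) {
    struct rb_node_hdr *node = *root;
    RBSTACK_INIT (rbstack);
    for (;;) {
      RBSTACK_PUSH (rbstack, node);
      if (new->key < ((struct int_node *) node)->key) {
        if (node->flags & RB_THREAD_PREV) {
          low_rb_link_at_prev (root, rbstack, &new->h); /* Frees the stack. */
          return;
        }
        node = node->prev;
      }
      else {                           /* Duplicates go after equal keys,
                                        * like rb_add() further down. */
        if (node->flags & RB_THREAD_NEXT) {
          low_rb_link_at_next (root, rbstack, &new->h);
          return;
        }
        node = node->next;
      }
    }
  }
  else {
    low_rb_init_root (&new->h);
    *root = &new->h;
  }
}
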
pike.git/src/rbtree.c:728:    }    }       RBSTACK_FREE_SET_ROOT (rbstack, top);   #ifdef PIKE_DEBUG    if (top->flags & RB_RED) rb_fatal (top, "Root node not black.\n");   #endif    return top;   }    - /* Returns the root node, which might have changed. The passed stack -  * is freed. node is the value of the pointer in parent that used to -  * point to the unlinked node. */ + /* Returns the root node, which might have changed. The rbstack +  * pointed to by rbstack_ptr goes down to the parent of the node that +  * was unlinked. node is the value of the pointer in the parent that +  * used to point to the unlinked node. +  * +  * *rbstack_ptr is freed if keep_rbstack is zero, otherwise it's set +  * to the new stack down to the parent node. */   static struct rb_node_hdr *rebalance_after_delete (struct rb_node_hdr *node, -  struct rbstack_ptr rbstack) +  struct rbstack_ptr *rbstack_ptr, +  int keep_rbstack)   {    struct rb_node_hdr *parent, *sibling, *top = node; -  +  struct rbstack_ptr rbstack = *rbstack_ptr, rbstack_top; +  +  if (keep_rbstack) { +  rbstack_top = rbstack; +  RBSTACK_UP (rbstack, parent); +  } +  else    RBSTACK_POP (rbstack, parent);       if (!parent) { -  if (!node) return 0; +  if (!node) return NULL;    node->flags &= ~RB_RED;    }    else do {    top = parent; -  + #ifdef RB_STATS +  rb_add_rebalance_cnt++; + #endif       if (node == parent->prev) {    if (!(parent->flags & RB_THREAD_PREV) && node->flags & RB_RED) {    node->flags &= ~RB_RED;    break;    }   #ifdef PIKE_DEBUG    if (parent->flags & RB_THREAD_NEXT)    /* Since the node in the prev subtree is the "concatenation"    * of two black nodes, there must be at least two black nodes
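
Both rebalancing passes restore the same two invariants: no red node has a red child, and every root-to-leaf path crosses the same number of black nodes. A standalone checker makes that explicit; this sketch is not part of rbtree.c (which has its own debug checks) and only uses the rb_node_hdr fields and flag bits visible in this diff. A negative return value signals a broken tree.

static int check_black_height (struct rb_node_hdr *node)
{
  int lh, rh;
  if (!node) return 0;                 /* Empty tree. */
  lh = node->flags & RB_THREAD_PREV ? 0 : check_black_height (node->prev);
  rh = node->flags & RB_THREAD_NEXT ? 0 : check_black_height (node->next);
  if (lh < 0 || rh < 0 || lh != rh) return -1;
  if (node->flags & RB_RED) {
    if ((!(node->flags & RB_THREAD_PREV) && node->prev->flags & RB_RED) ||
        (!(node->flags & RB_THREAD_NEXT) && node->next->flags & RB_RED))
      return -1;                       /* Red node with a red child. */
    return lh;                         /* Red nodes add no black height. */
  }
  return lh + 1;
}
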
pike.git/src/rbtree.c:772:    * / \ / \    * / \ / \    * *node(2B) sibling(R) ==> parent(R) (B)    * / \ / \    * / \ / \    * (B) (B) *node(2B) (B)    */    parent->flags |= RB_RED;    sibling->flags &= ~RB_RED;    rot_left (parent); -  RBSTACK_POP (rbstack, top); +  top = RBSTACK_PEEK (rbstack);    if (top)    SET_PTR_TO_CHILD (top, parent, sibling, sibling);    else    top = sibling; -  RBSTACK_PUSH (rbstack, sibling); +  if (keep_rbstack) rbstack_insert (&rbstack_top, &rbstack, sibling); +  else RBSTACK_PUSH (rbstack, sibling);   #ifdef PIKE_DEBUG    if (parent->flags & RB_THREAD_NEXT)    rb_fatal (parent, "Sibling in next is null; tree was unbalanced.\n");   #endif    sibling = parent->next;    goto prev_node_red_parent;    }    -  if (!(parent->flags & RB_RED)) { +  else if (!(parent->flags & RB_RED)) {    if ((sibling->flags & RB_THREAD_PREV || !(sibling->prev->flags & RB_RED)) &&    (sibling->flags & RB_THREAD_NEXT || !(sibling->next->flags & RB_RED))) {    /* Case 2a:    * parent(B) *parent(2B)    * / \ / \    * / \ / \    * *node(2B) sibling(B) ==> node(B) sibling(R)    * / \ / \    * / \ / \    * (B) (B) (B) (B)    */    sibling->flags |= RB_RED;    node = parent; -  RBSTACK_POP (rbstack, parent); +  if (keep_rbstack) RBSTACK_UP (rbstack, parent); +  else RBSTACK_POP (rbstack, parent);    continue;    }    }       else {    prev_node_red_parent:    if ((sibling->flags & RB_THREAD_PREV || !(sibling->prev->flags & RB_RED)) &&    (sibling->flags & RB_THREAD_NEXT || !(sibling->next->flags & RB_RED))) {    /* Case 2b:    * parent(R) parent(B)
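
rbstack_insert() (added near the top of this diff) is what lets the code above splice the new sibling into the kept stack without rewinding it: the entry lands at the cursor position and everything above it shifts up one step. A throwaway sketch of that effect on dummy nodes, where only the pointer values matter; rbstack_insert_demo() is a hypothetical name and the RBSTACK_* macros are assumed from rbtree_low.h:

static void rbstack_insert_demo (void)
{
  static struct rb_node_hdr a, b, x;   /* Dummies; contents never touched. */
  struct rbstack_ptr pos;
  RBSTACK_INIT (top);
  RBSTACK_PUSH (top, &a);
  pos = top;                           /* Cursor with a on top. */
  RBSTACK_PUSH (top, &b);
  rbstack_insert (&top, &pos, &x);
  /* The stack now reads a, x, b from bottom to top, and
   * RBSTACK_PEEK (pos) == &x, i.e. the cursor now has the inserted
   * entry on top, matching the "advances *pos" contract above. */
  RBSTACK_FREE (top);
}
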
pike.git/src/rbtree.c:827:    * => Done.    */    parent->flags &= ~RB_RED;    sibling->flags |= RB_RED;    break;    }    }       if (sibling->flags & RB_THREAD_NEXT || !(sibling->next->flags & RB_RED)) {    /* Case 3: -  * parent(?) parent(?) -  * / \ / \ -  * / \ / \ -  * *node(2B) sibling(B) *node(2B) new sibling(B) -  * / \ / \ -  * / \ ==> / \ -  * new sibling(R) (B) (B) sibling(R) -  * / \ / \ -  * / \ / \ -  * (B) (B) (B) (B) +  * parent(?) parent(?) +  * / \ / \ +  * / \ / \ +  * *node(2B) sibling(B) *node(2B) new sibling(B) +  * / \ / \ +  * / \ ==> / \ +  * new sibling(R) (B) (B) sibling(R) +  * / \ / \ +  * / \ / \ +  * (B) (B) (B) (B)    */    sibling->flags |= RB_RED;    sibling = parent->next = rot_right (sibling);    sibling->flags &= ~RB_RED;    }       /* Case 4:    * parent(?) sibling(?)    * / \ / \    * / \ / \    * *node(2B) sibling(B) ==> parent(B) (B)    * / \ / \    * / \ / \    * (?) (R) node(B) (?)    * => Done.    */ -  +  assert (!(sibling->flags & RB_RED));    rot_left (parent); -  RBSTACK_POP (rbstack, top); +  if (keep_rbstack) top = RBSTACK_PEEK (rbstack); +  else RBSTACK_POP (rbstack, top);    if (top) {    SET_PTR_TO_CHILD (top, parent, sibling, sibling);    if (parent->flags & RB_RED) {    sibling->flags |= RB_RED;    parent->flags &= ~RB_RED;    } -  else sibling->flags &= ~RB_RED; +     }    else {    top = sibling; -  /* sibling is the new root, which should always be black. -  * So don't transfer the color from parent. */ -  sibling->flags &= ~RB_RED; -  parent->flags &= ~RB_RED; +  /* parent was the root, which always is black. */ +  assert (!(parent->flags & RB_RED));    }    sibling->next->flags &= ~RB_RED; -  +  if (keep_rbstack) { +  /* Keep rbstack above the inserted element, so that we don't +  * need to use RBSTACK_UP. */ +  struct rbstack_ptr tmp = rbstack; +  rbstack_insert (&rbstack_top, &tmp, sibling); +  }    break;    }       else {   #ifdef PIKE_DEBUG    if (node != parent->next)    rb_fatal (parent,    "Childs parent doesn't know about it (stack probably wrong).\n");   #endif    /* The mirrored version of the above. 
*/ -  +     if (!(parent->flags & RB_THREAD_NEXT) && node->flags & RB_RED) {    node->flags &= ~RB_RED;    break;    }   #ifdef PIKE_DEBUG    if (parent->flags & RB_THREAD_PREV)    rb_fatal (parent, "Sibling in prev is null; tree was unbalanced.\n");   #endif    sibling = parent->prev;       if (sibling->flags & RB_RED) {    /* Case 1 */    parent->flags |= RB_RED;    sibling->flags &= ~RB_RED;    rot_right (parent); -  RBSTACK_POP (rbstack, top); +  top = RBSTACK_PEEK (rbstack);    if (top)    SET_PTR_TO_CHILD (top, parent, sibling, sibling);    else    top = sibling; -  RBSTACK_PUSH (rbstack, sibling); +  if (keep_rbstack) rbstack_insert (&rbstack_top, &rbstack, sibling); +  else RBSTACK_PUSH (rbstack, sibling);   #ifdef PIKE_DEBUG    if (parent->flags & RB_THREAD_PREV)    rb_fatal (parent, "Sibling in prev is null; tree was unbalanced.\n");   #endif    sibling = parent->prev;    goto next_node_red_parent;    }    -  if (!(parent->flags & RB_RED)) { +  else if (!(parent->flags & RB_RED)) {    if ((sibling->flags & RB_THREAD_PREV || !(sibling->prev->flags & RB_RED)) &&    (sibling->flags & RB_THREAD_NEXT || !(sibling->next->flags & RB_RED))) {    /* Case 2a */    sibling->flags |= RB_RED;    node = parent; -  RBSTACK_POP (rbstack, parent); +  if (keep_rbstack) RBSTACK_UP (rbstack, parent); +  else RBSTACK_POP (rbstack, parent);    continue;    }    }       else {    next_node_red_parent:    if ((sibling->flags & RB_THREAD_PREV || !(sibling->prev->flags & RB_RED)) &&    (sibling->flags & RB_THREAD_NEXT || !(sibling->next->flags & RB_RED))) {    /* Case 2b */    parent->flags &= ~RB_RED;
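
The keep_rbstack variants of the unlink functions in the next hunk build on the contract above: when the stack is kept, it afterwards tracks the in-order successor of the removed node, so a caller can delete during an iteration without a fresh lookup. A minimal caller-side sketch (unlink_first_keep_stack() is a hypothetical name; the caller must ensure the tree is non-empty and frees the returned node itself):

static struct rb_node_hdr *unlink_first_keep_stack (struct rb_node_hdr **root,
                                                    struct rb_node_hdr **next)
{
  struct rb_node_hdr *node = *root, *removed;
  RBSTACK_INIT (rbstack);
  RBSTACK_PUSH (rbstack, node);
  while (!(node->flags & RB_THREAD_PREV)) { /* Descend to rb_first (*root). */
    node = node->prev;
    RBSTACK_PUSH (rbstack, node);
  }
  removed = node;                 /* Without-move never relocates node data,
                                   * so the node on top of the stack is the
                                   * one that leaves the tree. */
  low_rb_unlink_without_move (root, &rbstack, 1);   /* keep_rbstack = 1 */
  *next = RBSTACK_PEEK (rbstack); /* In-order successor, or NULL if the
                                   * tree became empty. */
  RBSTACK_FREE (rbstack);
  return removed;
}
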
pike.git/src/rbtree.c:940:    }       if (sibling->flags & RB_THREAD_PREV || !(sibling->prev->flags & RB_RED)) {    /* Case 3 */    sibling->flags |= RB_RED;    sibling = parent->prev = rot_left (sibling);    sibling->flags &= ~RB_RED;    }       /* Case 4 */ +  assert (!(sibling->flags & RB_RED));    rot_right (parent); -  RBSTACK_POP (rbstack, top); +  if (keep_rbstack) top = RBSTACK_PEEK (rbstack); +  else RBSTACK_POP (rbstack, top);    if (top) {    SET_PTR_TO_CHILD (top, parent, sibling, sibling);    if (parent->flags & RB_RED) {    sibling->flags |= RB_RED;    parent->flags &= ~RB_RED;    } -  else sibling->flags &= ~RB_RED; +     }    else {    top = sibling; -  sibling->flags &= ~RB_RED; -  parent->flags &= ~RB_RED; +  assert (!(parent->flags & RB_RED));    }    sibling->prev->flags &= ~RB_RED; -  +  if (keep_rbstack) { +  struct rbstack_ptr tmp = rbstack; +  rbstack_insert (&rbstack_top, &tmp, sibling); +  }    break;    }    } while (parent);    -  +  if (keep_rbstack) { +  RBSTACK_UP_TO_ROOT (rbstack, top); +  *rbstack_ptr = rbstack_top; +  } +  else {    RBSTACK_FREE_SET_ROOT (rbstack, top); -  +  *rbstack_ptr = rbstack; +  } +    #ifdef PIKE_DEBUG    if (top->flags & RB_RED) rb_fatal (top, "Root node not black.\n");   #endif    return top;   }      void low_rb_init_root (struct rb_node_hdr *node)   {   #ifdef PIKE_DEBUG    if (!node) fatal ("New node is null.\n");   #endif -  + #ifdef RB_STATS +  rb_num_adds++; + #endif    node->flags = (node->flags & ~RB_RED) | RB_THREAD_PREV | RB_THREAD_NEXT; -  node->prev = node->next = 0; +  node->prev = node->next = NULL;   }      /* The passed stack is freed. */ - void low_rb_link_at_prev (struct rb_node_hdr **tree, struct rbstack_ptr rbstack, + void low_rb_link_at_prev (struct rb_node_hdr **root, struct rbstack_ptr rbstack,    struct rb_node_hdr *new)   {    struct rb_node_hdr *parent = RBSTACK_PEEK (rbstack); -  +    #ifdef PIKE_DEBUG    if (!new) fatal ("New node is null.\n");    if (!parent) fatal ("Cannot link in root node.\n");    if (!(parent->flags & RB_THREAD_PREV))    fatal ("Cannot link in node at interior prev link.\n");   #endif -  + #ifdef RB_STATS +  rb_num_adds++; + #endif +     new->flags |= RB_RED | RB_THREAD_PREV | RB_THREAD_NEXT;    new->prev = parent->prev;    new->next = parent;    parent->flags &= ~RB_THREAD_PREV;    parent->prev = new;    -  *tree = rebalance_after_add (new, rbstack); +  *root = rebalance_after_add (new, rbstack);   }      /* The passed stack is freed. 
*/ - void low_rb_link_at_next (struct rb_node_hdr **tree, struct rbstack_ptr rbstack, + void low_rb_link_at_next (struct rb_node_hdr **root, struct rbstack_ptr rbstack,    struct rb_node_hdr *new)   {    struct rb_node_hdr *parent = RBSTACK_PEEK (rbstack); -  +    #ifdef PIKE_DEBUG    if (!new) fatal ("New node is null.\n");    if (!parent) fatal ("Cannot link in root node.\n");    if (!(parent->flags & RB_THREAD_NEXT))    fatal ("Cannot link in node at interior next link.\n");   #endif -  + #ifdef RB_STATS +  rb_num_adds++; + #endif +     new->flags |= RB_RED | RB_THREAD_PREV | RB_THREAD_NEXT;    new->prev = parent;    new->next = parent->next;    parent->flags &= ~RB_THREAD_NEXT;    parent->next = new;    -  *tree = rebalance_after_add (new, rbstack); +  *root = rebalance_after_add (new, rbstack);   }    - #define DO_SIMPLE_UNLINK(parent, unlink, node, replace_with) \ - do { \ -  switch (replace_with) { \ -  DO_IF_DEBUG(default: fatal ("replace_with is %d.\n", replace_with)); \ -  case 0: \ -  if (parent) \ -  SET_PTR_TO_CHILD ( \ -  parent, unlink, \ -  (parent->flags |= RB_THREAD_PREV, node = unlink->prev), \ -  (parent->flags |= RB_THREAD_NEXT, node = unlink->next)); \ -  else \ -  node = 0; \ -  break; \ -  case 1: \ -  node = unlink->prev; \ -  DO_IF_DEBUG ( \ -  /* Since unlink->next is null, unlink->prev must be red and */ \ -  /* can't have children, or else the tree is unbalanced. */ \ -  if (!(node->flags & RB_THREAD_NEXT)) \ -  rb_fatal (parent, "Expected thread prev pointer in %p; " \ -  "tree is unbalanced.\n", node); \ -  ); \ -  node->next = unlink->next; \ -  goto fix_parent; \ -  case 2: \ -  node = unlink->next; \ -  DO_IF_DEBUG ( \ -  if (!(node->flags & RB_THREAD_PREV)) \ -  rb_fatal (parent, "Expected thread prev pointer in %p; " \ -  "tree is unbalanced.\n", node); \ -  ); \ -  node->prev = unlink->prev; \ -  fix_parent: \ -  if (parent) SET_PTR_TO_CHILD (parent, unlink, node, node); \ -  } \ + #define DO_SIMPLE_UNLINK(UNLINK, PARENT, NODE) do { \ +  PARENT = RBSTACK_PEEK (rbstack); \ +  \ +  if (UNLINK->flags & RB_THREAD_PREV) { \ +  if (UNLINK->flags & RB_THREAD_NEXT) { \ +  if (PARENT) \ +  SET_PTR_TO_CHILD ( \ +  PARENT, UNLINK, \ +  (PARENT->flags |= RB_THREAD_PREV, NODE = UNLINK->prev), \ +  (PARENT->flags |= RB_THREAD_NEXT, NODE = UNLINK->next)); \ +  else \ +  NODE = NULL; \ +  goto simple_unlink_done; \ +  } \ +  \ +  else { /* prev is null. */ \ +  NODE = UNLINK->next; \ +  DO_IF_DEBUG ( \ +  /* Since UNLINK->prev is null, UNLINK->next must be red */ \ +  /* and can't have children, or else the tree is */ \ +  /* unbalanced. */ \ +  if (!(NODE->flags & RB_THREAD_PREV)) \ +  rb_fatal (PARENT, "Expected thread prev pointer in %p; " \ +  "tree is unbalanced.\n", NODE); \ +  ); \ +  NODE->prev = UNLINK->prev; \ +  } \ +  } \ +  \ +  else { /* next is null. */ \ +  NODE = UNLINK->prev; \ +  DO_IF_DEBUG ( \ +  if (!(NODE->flags & RB_THREAD_NEXT)) \ +  rb_fatal (PARENT, "Expected thread next pointer in %p; " \ +  "tree is unbalanced.\n", NODE); \ +  ); \ +  NODE->next = UNLINK->next; \ +  } \ +  \ +  if (PARENT) SET_PTR_TO_CHILD (PARENT, UNLINK, NODE, NODE); \ +  simple_unlink_done:; \    } while (0)    - /* The node to unlink is the one on top of the stack. Returns the node -  * that actually got unlinked. The passed stack is freed. See -  * low_rb_delete about node_size. 
*/ - struct rb_node_hdr *low_rb_unlink (struct rb_node_hdr **tree, -  struct rbstack_ptr rbstack, + #define ADJUST_STACK_TO_NEXT(RBSTACK, NEXT) do { \ +  struct rb_node_hdr *node = RBSTACK_PEEK (RBSTACK); \ +  if (NEXT && (!node || \ +  (!(node->flags & RB_THREAD_PREV) && node->prev == NEXT) || \ +  (!(node->flags & RB_THREAD_NEXT) && node->next == NEXT))) { \ +  RBSTACK_PUSH (RBSTACK, NEXT); \ +  while (!(NEXT->flags & RB_THREAD_PREV)) \ +  RBSTACK_PUSH (RBSTACK, NEXT = NEXT->prev); \ +  } \ +  else \ +  while (node != NEXT) { \ +  RBSTACK_POP (RBSTACK, node); \ +  assert (!NEXT || node != NULL); \ +  node = RBSTACK_PEEK (RBSTACK); \ +  } \ +  } while (0) +  + /* The node to unlink is the one on top of the stack pointed to by +  * rbstack_ptr. If that node can't be unlinked (i.e. it got two +  * children), another node that can is found and its contents is +  * copied to the first one. Returns the node that actually got +  * unlinked. +  * +  * *rbstack_ptr is freed if keep_rbstack is zero, otherwise it's set +  * to the new stack down to the next node from the one that was +  * removed. +  * +  * See rb_remove_with_move about node_size. */ + struct rb_node_hdr *low_rb_unlink_with_move (struct rb_node_hdr **root, +  struct rbstack_ptr *rbstack_ptr, +  int keep_rbstack,    size_t node_size)   { -  struct rb_node_hdr *node, *parent, *unlink; -  int replace_with; /* 0: none, 1: prev, 2: next */ +  struct rb_node_hdr *node, *parent, *unlink, *next; +  struct rbstack_ptr rbstack = *rbstack_ptr;       RBSTACK_POP (rbstack, node);   #ifdef PIKE_DEBUG    if (!node) fatal ("No node to delete on stack.\n");   #endif -  + #ifdef RB_STATS +  rb_num_deletes++; + #endif    -  if (node->flags & RB_THREAD_PREV) { +  if (node->flags & (RB_THREAD_PREV|RB_THREAD_NEXT)) { +  next = node->next;    unlink = node; -  parent = RBSTACK_PEEK (rbstack); -  replace_with = node->flags & RB_THREAD_NEXT ? 0 : 2; +  DO_SIMPLE_UNLINK (unlink, parent, node);    } -  else if (node->flags & RB_THREAD_NEXT) { -  unlink = node; -  parent = RBSTACK_PEEK (rbstack); -  replace_with = 1; -  } +     else {    /* Node has two subtrees, so we can't delete it. Find another one    * to replace its data with. 
*/ -  INT16 flags; -  parent = node; +  next = parent = node;    RBSTACK_PUSH (rbstack, node);    for (unlink = node->next; !(unlink->flags & RB_THREAD_PREV); unlink = unlink->prev) {    parent = unlink;    RBSTACK_PUSH (rbstack, unlink);    } -  replace_with = 3; +     -  flags = node->flags; +  keep_flags (node,    MEMCPY ((char *) node + OFFSETOF (rb_node_hdr, flags),    (char *) unlink + OFFSETOF (rb_node_hdr, flags), -  node_size - OFFSETOF (rb_node_hdr, flags)); -  node->flags = (node->flags & ~RB_FLAG_MASK) | (flags & RB_FLAG_MASK); +  node_size - OFFSETOF (rb_node_hdr, flags)));       if (parent == node) {    node = unlink->next;    if (unlink->flags & RB_THREAD_NEXT)    parent->flags |= RB_THREAD_NEXT;    else {   #ifdef PIKE_DEBUG    if (!(node->flags & RB_THREAD_PREV))    rb_fatal (parent, "Expected thread prev pointer in %p; "    "tree is unbalanced.\n", node);   #endif    node->prev = unlink->prev;    }    parent->next = node;    } -  +     else    if (unlink->flags & RB_THREAD_NEXT) {    parent->flags |= RB_THREAD_PREV;    parent->prev = node = unlink->prev;    }    else {    node = unlink->next;   #ifdef PIKE_DEBUG    if (!(node->flags & RB_THREAD_PREV))    rb_fatal (parent, "Expected thread prev pointer in %p; "    "tree is unbalanced.\n", node);   #endif    node->prev = unlink->prev;    parent->prev = node;    } -  -  goto unlink_done; +     }    -  DO_SIMPLE_UNLINK (parent, unlink, node, replace_with); -  - unlink_done: -  if (unlink->flags & RB_RED) -  RBSTACK_FREE (rbstack); +  if (unlink->flags & RB_RED) { +  if (!keep_rbstack) RBSTACK_FREE (rbstack); +  }    else -  *tree = rebalance_after_delete (node, rbstack); +  *root = rebalance_after_delete (node, &rbstack, keep_rbstack);    -  +  if (keep_rbstack) ADJUST_STACK_TO_NEXT (rbstack, next); +  *rbstack_ptr = rbstack;    return unlink;   }    - /* Like low_rb_unlink, but relinks the nodes instead of moving over -  * the data. Somewhat slower unless the size of the nodes is large. */ - void low_rb_unlink_without_move (struct rb_node_hdr **tree, -  struct rbstack_ptr rbstack) + #if 0 + struct rb_node_hdr *low_rb_unlink_with_move (struct rb_node_hdr **root, +  struct rbstack_ptr *rbstack_ptr, +  int keep_rbstack, +  size_t node_size)   { -  struct rb_node_hdr *node, *parent, *unlink; -  int unlink_flags, replace_with; /* 0: none, 1: prev, 2: next, 3: fixed already */ +  struct rb_node_hdr *node = RBSTACK_PEEK (*rbstack_ptr), *next = rb_next (node); +  struct rb_node_hdr *unlink = +  real_low_rb_unlink_with_move (root, rbstack_ptr, 1, node_size); +  debug_check_rbstack (*root, *rbstack_ptr); +  if (node != unlink) next = node; +  if (RBSTACK_PEEK (*rbstack_ptr) != next) +  fatal ("Stack got %p on top, but next node is %p.\n", +  RBSTACK_PEEK (*rbstack_ptr), next); +  if (!keep_rbstack) RBSTACK_FREE (*rbstack_ptr); +  return unlink; + } + #endif    -  + /* Like low_rb_unlink_with_move, but relinks the nodes instead of +  * moving over the data. Somewhat slower unless the size of the nodes +  * is large. 
*/ + void low_rb_unlink_without_move (struct rb_node_hdr **root, +  struct rbstack_ptr *rbstack_ptr, +  int keep_rbstack) + { +  struct rb_node_hdr *node, *parent, *unlink, *next; +  struct rbstack_ptr rbstack = *rbstack_ptr; +  int unlink_flags; +     RBSTACK_POP (rbstack, node);   #ifdef PIKE_DEBUG    if (!node) fatal ("No node to delete on stack.\n");   #endif -  + #ifdef RB_STATS +  rb_num_deletes++; + #endif    -  if (node->flags & RB_THREAD_PREV) { +  if (node->flags & (RB_THREAD_PREV|RB_THREAD_NEXT)) { +  next = node->next;    unlink = node; -  parent = RBSTACK_PEEK (rbstack); -  replace_with = node->flags & RB_THREAD_NEXT ? 0 : 2; +  DO_SIMPLE_UNLINK (unlink, parent, node); +  unlink_flags = unlink->flags;    } -  else if (node->flags & RB_THREAD_NEXT) { -  unlink = node; -  parent = RBSTACK_PEEK (rbstack); -  replace_with = 1; -  } +     else {    /* Node has two subtrees, so we can't delete it. Find another one    * to switch places with. */    struct rb_node_hdr *orig_parent = RBSTACK_PEEK (rbstack), *tmp; -  struct rbstack_ptr pos = rbstack; +  struct rbstack_ptr pos;    parent = unlink = node; -  RBSTACK_PUSH (rbstack, 0); +  RBSTACK_PUSH (rbstack, NULL); +  pos = rbstack;    for (node = node->next; !(node->flags & RB_THREAD_PREV); node = node->prev) {    parent = node;    RBSTACK_PUSH (rbstack, node);    } -  pos.slice->stack[pos.ssp] = node; +  RBSTACK_POKE (pos, next = node);    unlink_flags = node->flags;       if (orig_parent) SET_PTR_TO_CHILD (orig_parent, unlink, node, node); -  else *tree = node; +  else *root = node;    for (tmp = unlink->prev; !(tmp->flags & RB_THREAD_NEXT); tmp = tmp->next) {}    tmp->next = node;    node->prev = unlink->prev; -  +     if (parent == unlink) { /* node replaces its parent. */    node->flags = /* Keep the thread next flag. */    (node->flags & ~(RB_FLAG_MASK & ~RB_THREAD_NEXT)) |    (unlink->flags & (RB_FLAG_MASK & ~RB_THREAD_NEXT));    parent = node;    node = parent->next;    } -  +     else {    if (node->flags & RB_THREAD_NEXT) {    parent->flags |= RB_THREAD_PREV;    parent->prev = node;    }    else parent->prev = node->next;    node->flags = (node->flags & ~RB_FLAG_MASK) | (unlink->flags & RB_FLAG_MASK);    node->next = unlink->next;    node = parent->prev;    } -  +  }    -  goto unlink_done; +  if (unlink_flags & RB_RED) { +  if (!keep_rbstack) RBSTACK_FREE (rbstack);    } -  +  else +  *root = rebalance_after_delete (node, &rbstack, keep_rbstack);    -  DO_SIMPLE_UNLINK (parent, unlink, node, replace_with); -  unlink_flags = unlink->flags; +  if (keep_rbstack) ADJUST_STACK_TO_NEXT (rbstack, next); +  *rbstack_ptr = rbstack; + }    - unlink_done: -  if (unlink_flags & RB_RED) -  RBSTACK_FREE (rbstack); -  else -  *tree = rebalance_after_delete (node, rbstack); + #if 0 + void low_rb_unlink_without_move (struct rb_node_hdr **root, +  struct rbstack_ptr *rbstack_ptr, +  int keep_rbstack) + { +  struct rb_node_hdr *node = RBSTACK_PEEK (*rbstack_ptr), *next = rb_next (node); +  real_low_rb_unlink_without_move (root, rbstack_ptr, 1); +  debug_check_rbstack (*root, *rbstack_ptr); +  if (RBSTACK_PEEK (*rbstack_ptr) != next) +  fatal ("Stack got %p on top, but next node is %p.\n", +  RBSTACK_PEEK (*rbstack_ptr), next); +  if (!keep_rbstack) RBSTACK_FREE (*rbstack_ptr);   } -  + #endif    - /* Returns the node in the tree. It might not be the passed new node -  * if an equal one was found. 
*/ - struct rb_node_hdr *low_rb_insert (struct rb_node_hdr **tree, -  low_rb_cmp_fn *cmp_fn, void *key, + #ifdef ENABLE_UNUSED_RB_FUNCTIONS + /* These functions are disabled since they aren't used from anywhere. +  * If you like to use them, just let me know (they are tested). /mast */ +  + /* Does not replace an existing equal node. Returns the node in the +  * tree. It might not be the passed new node if an equal one was +  * found. */ + struct rb_node_hdr *rb_insert (struct rb_node_hdr **root, +  rb_find_fn *find_fn, void *key,    struct rb_node_hdr *new)   { -  if (*tree) { -  struct rb_node_hdr *node = *tree; +  if (*root) { +  struct rb_node_hdr *node = *root;    RBSTACK_INIT (rbstack); -  LOW_RB_TRACK (rbstack, node, cmp_res = cmp_fn (key, node), { -  low_rb_link_at_next (tree, rbstack, new); /* Got less. */ +  LOW_RB_TRACK ( +  rbstack, node, +  cmp_res = find_fn (key, node), +  { /* Got less. */ +  low_rb_link_at_next (root, rbstack, new);    }, { /* Got equal - new not used. */    RBSTACK_FREE (rbstack);    return node;    }, { /* Got greater. */ -  low_rb_link_at_prev (tree, rbstack, new); +  low_rb_link_at_prev (root, rbstack, new);    });    }    else {    low_rb_init_root (new); -  *tree = new; +  *root = new;    }    return new;   }    - void low_rb_add (struct rb_node_hdr **tree, -  low_rb_cmp_fn *cmp_fn, void *key, + void rb_add (struct rb_node_hdr **root, +  rb_find_fn *find_fn, void *key,    struct rb_node_hdr *new)   { -  if (*tree) { -  struct rb_node_hdr *node = *tree; +  if (*root) { +  struct rb_node_hdr *node = *root;    RBSTACK_INIT (rbstack); -  LOW_RB_TRACK (rbstack, node, cmp_res = cmp_fn (key, node), { -  low_rb_link_at_next (tree, rbstack, new); /* Got less. */ -  }, { /* Got equal. */ -  if (node->flags & RB_THREAD_PREV) -  low_rb_link_at_prev (tree, rbstack, new); -  else if (node->flags & RB_THREAD_NEXT) -  low_rb_link_at_next (tree, rbstack, new); -  else { -  node = node->next; -  RBSTACK_PUSH (rbstack, node); -  while (!(node->flags & RB_THREAD_PREV)) { -  node = node->prev; -  RBSTACK_PUSH (rbstack, node); +  LOW_RB_TRACK_NEQ ( +  rbstack, node, cmp_res = find_fn (key, node) >= 0 ? 1 : -1, +  low_rb_link_at_next (root, rbstack, new), /* Got less or equal. */ +  low_rb_link_at_prev (root, rbstack, new) /* Got greater. */ +  );    } -  low_rb_link_at_prev (tree, rbstack, new); -  } -  }, { -  low_rb_link_at_prev (tree, rbstack, new); /* Got greater. */ -  }); -  } +     else {    low_rb_init_root (new); -  *tree = new; +  *root = new;    }   }    - void low_rb_add_after (struct rb_node_hdr **tree, -  low_rb_cmp_fn *cmp_fn, void *key, + void rb_add_after (struct rb_node_hdr **root, +  rb_find_fn *find_fn, void *key,    struct rb_node_hdr *new,    struct rb_node_hdr *existing)   { -  if (*tree) { -  struct rb_node_hdr *node = *tree; +  if (*root) { +  struct rb_node_hdr *node = *root;    RBSTACK_INIT (rbstack);       if (existing) { - #ifdef PIKE_DEBUG -  if (cmp_fn (key, existing)) fatal ("Given key doesn't match the existing node.\n"); - #endif -  LOW_RB_TRACK (rbstack, node, cmp_res = cmp_fn (key, node) > 0 ? 1 : -1, ;, ;, ;); +  LOW_RB_TRACK_NEQ ( +  rbstack, node, cmp_res = find_fn (key, node) >= 0 ? 
1 : -1, ;, ;);    while (node != existing) { -  LOW_RB_TRACK_NEXT (rbstack, node); +  LOW_RB_TRACK_PREV (rbstack, node);   #ifdef PIKE_DEBUG    if (!node) fatal ("Tree doesn't contain the existing node.\n");   #endif    }    if (node->flags & RB_THREAD_NEXT) { -  low_rb_link_at_next (tree, rbstack, new); +  low_rb_link_at_next (root, rbstack, new);    return;    }    else node = node->next;    }       /* Link at lowest position. */    RBSTACK_PUSH (rbstack, node);    while (!(node->flags & RB_THREAD_PREV)) {    node = node->prev;    RBSTACK_PUSH (rbstack, node);    } -  low_rb_link_at_prev (tree, rbstack, new); +  low_rb_link_at_prev (root, rbstack, new);    }       else {   #ifdef PIKE_DEBUG    if (existing) fatal ("Tree doesn't contain the existing node.\n");   #endif    low_rb_init_root (new); -  *tree = new; +  *root = new;    }   }    - /* Returns the node to free, if any. node_size is the size of the -  * whole node, including rb_node_hdr. It's used to move the data from -  * one node to another if the original node can't be removed. */ - struct rb_node_hdr *low_rb_delete (struct rb_node_hdr **tree, -  low_rb_cmp_fn *cmp_fn, void *key, -  size_t node_size) + /* Returns the node to free, if any. Note that rb_remove_with_move +  * is usually faster if the node size is small. */ + struct rb_node_hdr *rb_remove (struct rb_node_hdr **root, +  rb_find_fn *find_fn, void *key)   { -  struct rb_node_hdr *node = *tree; +  struct rb_node_hdr *node = *root;    if (node) {    RBSTACK_INIT (rbstack); -  LOW_RB_TRACK (rbstack, node, -  cmp_res = cmp_fn (key, node), -  ;, /* Got less. */ -  return low_rb_unlink (tree, rbstack, node_size), /* Got equal. */ -  ;); /* Got greater. */ +  LOW_RB_TRACK ( +  rbstack, node, +  cmp_res = find_fn (key, node), +  { /* Got less. */ +  }, { /* Got equal. */ +  low_rb_unlink_without_move (root, rbstack); +  return node; +  }, { /* Got greater. */ +  });    RBSTACK_FREE (rbstack);    } -  return 0; +  return NULL;   }    - /* Like low_rb_delete, but relinks the nodes instead of moving the -  * data, when that's necessary. Somewhat slower unless the size of the -  * nodes is large. */ - struct rb_node_hdr *low_rb_delete_without_move (struct rb_node_hdr **tree, -  low_rb_cmp_fn *cmp_fn, void *key) + void rb_remove_node (struct rb_node_hdr **root, +  rb_find_fn *find_fn, void *key, +  struct rb_node_hdr *to_delete)   { -  struct rb_node_hdr *node = *tree; +  struct rb_node_hdr *node = *root; +  RBSTACK_INIT (rbstack); + #ifdef PIKE_DEBUG +  if (!node) fatal ("Tree is empty.\n"); + #if 0 +  if (find_fn (key, to_delete)) +  fatal ("Given key doesn't match the node to delete.\n"); + #endif + #endif +  LOW_RB_TRACK_NEQ (rbstack, node, cmp_res = find_fn (key, node) >= 0 ? 1 : -1, ;, ;); +  while (node != to_delete) { +  LOW_RB_TRACK_PREV (rbstack, node); + #ifdef PIKE_DEBUG +  if (!node) fatal ("Tree doesn't contain the node to delete.\n"); + #endif +  } +  low_rb_unlink_without_move (root, rbstack); + } +  + /* Variant of rb_remove that moves the contents of another node to the +  * one to be removed if it can't be unlinked. node_size is the size of +  * the whole node, including rb_node_hdr. If cleanup_fn isn't NULL, +  * it's called for the node whose contents is removed, while the node +  * being returned is the one to actually free. 
*/ + struct rb_node_hdr *rb_remove_with_move (struct rb_node_hdr **root, +  rb_find_fn *find_fn, void *key, +  size_t node_size, +  rb_free_fn *cleanup_fn, +  void *cleanup_fn_extra) + { +  struct rb_node_hdr *node = *root;    if (node) {    RBSTACK_INIT (rbstack); -  LOW_RB_TRACK (rbstack, node, -  cmp_res = cmp_fn (key, node), -  ;, /* Got less. */ -  low_rb_unlink_without_move (tree, rbstack); /* Got equal. */ -  return node;, -  ;); /* Got greater. */ +  LOW_RB_TRACK ( +  rbstack, node, +  cmp_res = find_fn (key, node), +  { /* Got less. */ +  }, { /* Got equal. */ +  if (cleanup_fn) cleanup_fn (node, cleanup_fn_extra); +  return low_rb_unlink_with_move (root, rbstack, node_size); +  }, { /* Got greater. */ +  });    RBSTACK_FREE (rbstack);    } -  return 0; +  return NULL;   }    - /* Returns the node to actually free. See low_rb_delete about + /* Returns the node to actually free. See rb_remove about    * node_size. */ - struct rb_node_hdr *low_rb_delete_node (struct rb_node_hdr **tree, -  low_rb_cmp_fn *cmp_fn, void *key, + struct rb_node_hdr *rb_remove_node_with_move (struct rb_node_hdr **root, +  rb_find_fn *find_fn, void *key,    struct rb_node_hdr *to_delete,    size_t node_size)   { -  struct rb_node_hdr *node = *tree; +  struct rb_node_hdr *node = *root;    RBSTACK_INIT (rbstack);   #ifdef PIKE_DEBUG    if (!node) fatal ("Tree is empty.\n"); -  if (cmp_fn (key, to_delete)) fatal ("Given key doesn't match the node to delete.\n"); + #if 0 +  if (find_fn (key, to_delete)) +  fatal ("Given key doesn't match the node to delete.\n");   #endif -  LOW_RB_TRACK (rbstack, node, cmp_res = cmp_fn (key, node) > 0 ? 1 : -1, ;, ;, ;); + #endif +  LOW_RB_TRACK_NEQ (rbstack, node, cmp_res = find_fn (key, node) >= 0 ? 1 : -1, ;, ;);    while (node != to_delete) { -  LOW_RB_TRACK_NEXT (rbstack, node); +  LOW_RB_TRACK_PREV (rbstack, node);   #ifdef PIKE_DEBUG    if (!node) fatal ("Tree doesn't contain the node to delete.\n");   #endif    } -  return low_rb_unlink (tree, rbstack, node_size); +  return low_rb_unlink_with_move (root, rbstack, node_size);   }    - void low_rb_delete_node_without_move (struct rb_node_hdr **tree, -  low_rb_cmp_fn *cmp_fn, void *key, -  struct rb_node_hdr *to_delete) + struct rb_node_hdr *rb_find_eq (struct rb_node_hdr *root, +  rb_find_fn *find_fn, void *key)   { -  struct rb_node_hdr *node = *tree; -  RBSTACK_INIT (rbstack); +  if (root) +  LOW_RB_FIND (root, cmp_res = find_fn (key, root), ;, return root, ;); +  return NULL; + } +  + struct rb_node_hdr *rb_find_lt (struct rb_node_hdr *root, +  rb_find_fn *find_fn, void *key) + { +  if (root) +  LOW_RB_FIND_NEQ (root, cmp_res = find_fn (key, root) <= 0 ? -1 : 1, +  return root, return root->prev); +  return NULL; + } +  + struct rb_node_hdr *rb_find_gt (struct rb_node_hdr *root, +  rb_find_fn *find_fn, void *key) + { +  if (root) +  LOW_RB_FIND_NEQ (root, cmp_res = find_fn (key, root) >= 0 ? 1 : -1, +  return root->next, return root); +  return NULL; + } +  + struct rb_node_hdr *rb_find_le (struct rb_node_hdr *root, +  rb_find_fn *find_fn, void *key) + { +  if (root) +  LOW_RB_FIND_NEQ (root, cmp_res = find_fn (key, root) >= 0 ? 1 : -1, +  return root, return root->prev); +  return NULL; + } +  + struct rb_node_hdr *rb_find_ge (struct rb_node_hdr *root, +  rb_find_fn *find_fn, void *key) + { +  if (root) +  LOW_RB_FIND_NEQ (root, cmp_res = find_fn (key, root) <= 0 ? 
-1 : 1, +  return root->next, return root); +  return NULL; + } +  + #endif /* ENABLE_UNUSED_RB_FUNCTIONS */ +  + struct rb_node_hdr *rb_get_nth (struct rb_node_hdr *root, size_t n) + {   #ifdef PIKE_DEBUG -  if (!node) fatal ("Tree is empty.\n"); -  if (cmp_fn (key, to_delete)) fatal ("Given key doesn't match the node to delete.\n"); +  size_t index = n;   #endif -  LOW_RB_TRACK (rbstack, node, cmp_res = cmp_fn (key, node) > 0 ? 1 : -1, ;, ;, ;); -  while (node != to_delete) { -  LOW_RB_TRACK_NEXT (rbstack, node); +  if ((root = rb_first (root))) { +  while (n) { +  n--; +  root = rb_next (root);   #ifdef PIKE_DEBUG -  if (!node) fatal ("Tree doesn't contain the node to delete.\n"); +  if (!root) goto tree_too_small;   #endif    } -  low_rb_unlink_without_move (tree, rbstack); +  return root;    } -  + tree_too_small: + #ifdef PIKE_DEBUG +  fatal ("Tree too small for index %"PRINTSIZET"u.\n", index); + #endif +  return (struct rb_node_hdr *) (ptrdiff_t) -1; + }    - struct rb_node_hdr *low_rb_copy (struct rb_node_hdr *source, -  low_rb_copy_fn *copy_node_fn) + size_t rb_sizeof (struct rb_node_hdr *root)   { -  +  size_t size = 0; +  if ((root = rb_first (root))) { +  do { +  size++; +  } while ((root = rb_next (root))); +  } +  return size; + } +  + #ifdef ENABLE_UNUSED_RB_FUNCTIONS +  + void rb_free (struct rb_node_hdr *root, rb_free_fn *free_node_fn, void *extra) + { +  if ((root = rb_first (root))) { +  struct rb_node_hdr *next; +  do { +  next = rb_next (root); +  free_node_fn (root, extra); +  root = next; +  } while (root); +  } + } +  + int rb_equal (struct rb_node_hdr *a, struct rb_node_hdr *b, +  rb_equal_fn *node_equal_fn, void *extra) + { +  a = rb_first (a); +  b = rb_first (b); +  while (a && b) { +  if (!node_equal_fn (a, b, extra)) return 0; +  a = rb_next (a); +  b = rb_next (b); +  } +  return !(a || b); + } +  + struct rb_node_hdr *rb_copy (struct rb_node_hdr *source, +  rb_copy_fn *copy_node_fn, void *extra) + {    if (source) { -  struct rb_node_hdr *copy, *target, *new, *t_prev_tgt = 0, *t_next_src = 0; +  struct rb_node_hdr *copy, *target, *new, *t_prev_tgt = NULL, *t_next_src = NULL;    RBSTACK_INIT (s_stack);    RBSTACK_INIT (t_stack);    -  copy = target = copy_node_fn (source); +  copy = target = copy_node_fn (source, extra);    copy->flags = (copy->flags & ~RB_FLAG_MASK) | (source->flags & RB_FLAG_MASK);    -  LOW_RB_TRAVERSE (1, s_stack, source, { /* Push. */ +  LOW_RB_TRAVERSE ( +  1, s_stack, source, +  { /* Push. */    }, { /* prev is leaf. */    target->prev = t_prev_tgt;    }, { /* prev is subtree. */ -  new = target->prev = copy_node_fn (source->prev); +  new = target->prev = copy_node_fn (source->prev, extra);    new->flags = (new->flags & ~RB_FLAG_MASK) | (source->prev->flags & RB_FLAG_MASK);    RBSTACK_PUSH (t_stack, target);    target = new;    }, { /* Between. */    t_prev_tgt = target;    if (t_next_src) {    t_next_src->next = target; -  t_next_src = 0; +  t_next_src = NULL;    }    }, { /* next is leaf. */    t_next_src = target;    }, { /* next is subtree. */ -  new = target->next = copy_node_fn (source->next); +  new = target->next = copy_node_fn (source->next, extra);    new->flags = (new->flags & ~RB_FLAG_MASK) | (source->next->flags & RB_FLAG_MASK);    RBSTACK_PUSH (t_stack, target);    target = new;    }, { /* Pop. 
*/    RBSTACK_POP (t_stack, target);    });    -  if (t_next_src) t_next_src->next = 0; +  if (t_next_src) t_next_src->next = NULL;    return copy;    } -  else return 0; +  else return NULL;   }    - void low_rb_free (struct rb_node_hdr *tree, low_rb_free_fn *free_node_fn) - { -  RBSTACK_INIT (rbstack); -  LOW_RB_TRAVERSE (1, rbstack, tree, ;, ;, ;, ;, ;, ;, free_node_fn (tree)); - } + #endif /* ENABLE_UNUSED_RB_FUNCTIONS */    - struct rb_node_hdr *low_rb_find_eq (struct rb_node_hdr *tree, -  low_rb_cmp_fn *cmp_fn, void *key) + /* Converts a tree to an ordered list of nodes linked by the next +  * pointers. */ + struct rb_node_hdr *rb_make_list (struct rb_node_hdr *tree)   { -  +  struct rb_node_hdr *list = tree = rb_first (tree);    if (tree) -  LOW_RB_FIND (tree, cmp_res = cmp_fn (key, tree), ;, return tree, ;); -  return 0; +  while ((tree = tree->next = rb_next (tree))) {} +  return list;   }    - struct rb_node_hdr *low_rb_find_lt (struct rb_node_hdr *tree, -  low_rb_cmp_fn *cmp_fn, void *key) + /* Converts a list of nodes linked by the next pointers to a perfectly +  * balanced tree (with a minimum of black nodes). Time complexity is +  * O(length). Returns the tree root. */ + struct rb_node_hdr *rb_make_tree (struct rb_node_hdr *list, size_t length)   { -  if (tree) -  LOW_RB_FIND (tree, cmp_res = cmp_fn (key, tree) > 0 ? 1 : -1, -  return tree, ;, return tree->prev); -  return 0; - } +  struct rb_node_hdr *root = NULL;    - struct rb_node_hdr *low_rb_find_gt (struct rb_node_hdr *tree, -  low_rb_cmp_fn *cmp_fn, void *key) +  if (length > 1) { +  unsigned depth, *top_idx; +  size_t deep_end, count = 0, start = 0, idx = 1; +  struct rb_node_hdr **stack, *prev_tgt = NULL, *next_src = NULL; +  int stack_malloced = 0; +     { -  if (tree) -  LOW_RB_FIND (tree, cmp_res = cmp_fn (key, tree) >= 0 ? 1 : -1, -  return tree->next, ;, return tree); -  return 0; +  unsigned l = 0, h = sizeof (length) * CHAR_BIT; +  do { +  depth = (l + h) >> 1; +  if (1u << depth <= length) l = depth + 1; else h = depth; +  } while (l < h); +  if (1u << depth <= length) depth++;    }    - struct rb_node_hdr *low_rb_find_le (struct rb_node_hdr *tree, -  low_rb_cmp_fn *cmp_fn, void *key) +  deep_end = (length - (1 << (depth - 1))) << 1; +  if (!(length & 1)) deep_end |= 1; +     { -  if (tree) -  LOW_RB_FIND (tree, cmp_res = cmp_fn (key, tree) >= 0 ? 1 : -1, -  return tree, ;, return tree->prev); -  return 0; +  size_t stack_size = +  depth * sizeof (struct rb_node_hdr *) + +  (depth + 1) * sizeof (unsigned); + #ifdef HAVE_ALLOCA +  stack = alloca (stack_size); +  if (!stack) + #endif +  { +  stack = xalloc (stack_size); +  stack_malloced = 1;    } -  +  top_idx = (unsigned *) stack + depth; +  memset (top_idx, 0, (depth + 1) * sizeof (unsigned)); +  depth--; +  }    - struct rb_node_hdr *low_rb_find_ge (struct rb_node_hdr *tree, -  low_rb_cmp_fn *cmp_fn, void *key) - { -  if (tree) -  LOW_RB_FIND (tree, cmp_res = cmp_fn (key, tree) > 0 ? 1 : -1, -  return tree->next, ;, return tree); -  return 0; +  while (count < length) { +  struct rb_node_hdr *next; + #ifdef PIKE_DEBUG +  if (!list) fatal ("Premature end of list at %"PRINTSIZET"u, " +  "expected %"PRINTSIZET"u.\n", count, length); + #endif +  next = list->next; +  +  if (idx > start) { /* Leaf node. */ +  idx = start; +  list->flags |= RB_THREAD_PREV|RB_THREAD_NEXT; +  next_src = stack[idx] = list; +  list->prev = prev_tgt;    } -  +  else { /* Interior node. 
*/ +  idx = top_idx[idx]; +  list->flags &= ~(RB_THREAD_PREV|RB_THREAD_NEXT); +  prev_tgt = next_src->next = stack[idx] = list; +  list->prev = stack[idx - 1]; +  if (idx == depth) { +  assert (!root); +  root = list; +  } +  }    - void init_rbtree() - { +  if (count++ == deep_end) { +  if (idx) { /* Interior node is "half a leaf". */ +  next_src = list; +  list->flags |= RB_THREAD_NEXT; +  } +  start = 1; +  } +  +  if (idx & 1) list->flags &= ~RB_RED; +  else list->flags |= RB_RED; +  +  if (top_idx[idx] == top_idx[idx + 1]) +  top_idx[idx] = idx + 1; +  else { +  top_idx[idx] = top_idx[idx + 1]; +  stack[idx + 1]->next = list; +  } +  +  list = next; +  } +  +  next_src->next = NULL; +  root->flags &= ~RB_RED; +  if (stack_malloced) xfree (stack); +  } +  else if (length) { +  root = list;   #ifdef PIKE_DEBUG -  union rb_node test; -  test.h.flags = 0; -  test.i.ind.type = (1 << 8) - 1; -  test.i.ind.subtype = (1 << 16) - 1; -  test.i.ind.u.refs = (INT32 *) (ptrdiff_t) -1; -  if (test.h.flags & (RB_IND_FLAG_MASK)) -  fatal ("The ind svalue overlays the flags field in an unexpected way.\n"); -  test.h.flags |= RB_FLAG_MASK; -  if (test.i.ind.type & RB_FLAG_MARKER) -  fatal ("The ind svalue overlays the flags field in an unexpected way.\n"); -  test.i.ind.type |= RB_FLAG_MARKER; -  if ((test.i.ind.type & ~RB_IND_FLAG_MASK) != (1 << 8) - 1) -  fatal ("The ind svalue overlays the flags field in an unexpected way.\n"); +  list = list->next;   #endif -  init_rb_node_ind_blocks(); -  init_rb_node_indval_blocks(); +  low_rb_init_root (root);    }    - /* Pike may exit without calling this. */ - void exit_rbtree() + #ifdef PIKE_DEBUG +  if (list) fatal ("List longer than expected %d.\n", length); + #endif +  return root; + } +  + #ifdef ENABLE_UNUSED_RB_FUNCTIONS +  + struct linear_merge_cleanup   { -  free_all_rb_node_ind_blocks(); -  free_all_rb_node_indval_blocks(); +  int operation; +  struct rb_node_hdr *a, *b, *res; +  rb_merge_free_fn *free_node_fn; +  void *extra; + }; +  + static void do_linear_merge_cleanup (struct linear_merge_cleanup *s) + { +  struct rb_node_hdr *node, *next; +  rb_merge_free_fn *free_node_fn = s->free_node_fn; +  void *extra = s->extra; +  +  for (node = s->res; node; node = next) { +  next = node->next; +  free_node_fn (node, extra, MERGE_TREE_RES);    }    - #if defined (PIKE_DEBUG) || defined (TEST_RBTREE) +  if (s->operation & PIKE_MERGE_DESTR_A) +  for (node = s->a; node; node = next) { +  next = rb_prev (node); +  free_node_fn (node, extra, MERGE_TREE_A); +  }    - static void debug_dump_ind_data (struct rb_node_ind *node) +  if (s->operation & PIKE_MERGE_DESTR_B) +  for (node = s->b; node; node = next) { +  next = rb_prev (node); +  free_node_fn (node, extra, MERGE_TREE_B); +  } + } +  + /* Merges two trees in linear time (no more, no less). The operation +  * argument describes the way to merge, like the one given to merge() +  * in array.c. Two extra bits, PIKE_MERGE_DESTR_A and +  * PIKE_MERGE_DESTR_B, tells whether or not to be destructive on the +  * two passed trees. If destructive on a tree, the remaining nodes in +  * it will be freed with free_node_fn, otherwise copy_node_fn will be +  * used to copy nodes from it to the result. The two callbacks will +  * receive either MERGE_TREE_A or MERGE_TREE_B as the third arg, +  * depending on whether the node argument comes from tree a or tree b, +  * respectively. free_node_fn might also get MERGE_TREE_RES to clean +  * up any newly created node copies in the result, in case of +  * exception. 
+  * +  * The function returns a list linked by the next pointers, and if +  * length is nonzero it'll write the length of the list there. The +  * caller can then use these as arguments to rb_make_tree. +  * +  * NB: It doesn't handle making duplicates of the same node, i.e. +  * PIKE_ARRAY_OP_A without PIKE_ARRAY_OP_SKIP_A. Not a problem since +  * none of the currently defined operations use that. +  * +  * NB: It's assumed that a and b aren't part of the same tree. */ + struct rb_node_hdr *rb_linear_merge ( +  struct rb_node_hdr *a, struct rb_node_hdr *b, int operation, +  rb_cmp_fn *cmp_fn, void *cmp_fn_extra, +  rb_merge_copy_fn *copy_node_fn, void *copy_fn_extra, +  rb_merge_free_fn *free_node_fn, void *free_fn_extra, +  size_t *length)   { -  struct svalue tmp; -  print_svalue (stderr, use_rb_node_ind (node, tmp)); -  fputc (' ', stderr); +  struct rb_node_hdr *n; +  size_t len = 0; +  int cmp_res, op; +  struct linear_merge_cleanup s; +  ONERROR uwp; +  +  s.operation = operation; +  s.free_node_fn = free_node_fn; +  s.extra = free_fn_extra; +  SET_ONERROR (uwp, do_linear_merge_cleanup, &s); +  +  LOW_RB_MERGE ( +  x, s.a, s.b, s.res, len, operation, ;, ;, +  { +  cmp_res = cmp_fn (s.a, s.b, cmp_fn_extra); +  }, { /* Copy a. */ +  new_node = operation & PIKE_MERGE_DESTR_A ? s.a : +  copy_node_fn (s.a, copy_fn_extra, MERGE_TREE_A); +  }, { /* Free a. */ +  if (operation & PIKE_MERGE_DESTR_A) +  free_node_fn (s.a, free_fn_extra, MERGE_TREE_A); +  }, { /* Copy b. */ +  new_node = operation & PIKE_MERGE_DESTR_B ? s.b : +  copy_node_fn (s.b, copy_fn_extra, MERGE_TREE_B); +  }, { /* Free b. */ +  if (operation & PIKE_MERGE_DESTR_B) +  free_node_fn (s.b, free_fn_extra, MERGE_TREE_B); +  }); +  +  UNSET_ONERROR (uwp); +  +  if (length) *length = len; +  return s.res;   }    - static void debug_dump_indval_data (struct rb_node_indval *node) + #endif /* ENABLE_UNUSED_RB_FUNCTIONS */ +  + #ifdef RB_STATS +  + size_t rb_num_finds = 0, rb_find_depth = 0; + size_t rb_num_sidesteps = 0, rb_num_sidestep_ops = 0; + size_t rb_num_tracks = 0, rb_track_depth = 0; + size_t rb_num_sidetracks = 0, rb_num_sidetrack_ops = 0; + size_t rb_max_depth = 0; + size_t rb_num_traverses = 0, rb_num_traverse_ops = 0; + size_t rbstack_slice_allocs = 0; + size_t rb_num_adds = 0, rb_add_rebalance_cnt = 0; + size_t rb_num_deletes = 0, rb_del_rebalance_cnt = 0; +  + void reset_rb_stats()   { -  struct svalue tmp; -  print_svalue (stderr, use_rb_node_ind (node, tmp)); -  fputs (": ", stderr); -  print_svalue (stderr, &node->val); -  fputc (' ', stderr); +  rb_num_sidesteps = rb_num_sidestep_ops = 0; +  rb_num_finds = rb_find_depth = 0; +  rb_num_tracks = rb_track_depth = 0; +  rb_num_sidetracks = rb_num_sidetrack_ops = 0; +  rb_max_depth = 0; +  rb_num_traverses = rb_num_traverse_ops = 0; +  rbstack_slice_allocs = 0; +  rb_num_adds = rb_add_rebalance_cnt = 0; +  rb_num_deletes = rb_del_rebalance_cnt = 0;   }    - static void debug_dump_rb_tree (struct rb_node_hdr *tree, dump_data_fn *dump_data) + void print_rb_stats (int reset)   { -  if (tree) { -  struct rb_node_hdr *n; +  fprintf (stderr, +  "rbtree stats:\n" +  " number of finds . . . . . . %10"PRINTSIZET"u\n" +  " avg find depth . . . . . . . %10.2f\n" +  " number of sidesteps . . . . %10"PRINTSIZET"u\n" +  " avg ops per sidestep . . . . %10.2f\n" +  " number of tracks . . . . . . %10"PRINTSIZET"u\n" +  " avg track depth . . . . . . %10.2f\n" +  " number of sidetracks . . . . %10"PRINTSIZET"u\n" +  " avg ops per sidetrack . . . %10.2f\n" +  " maximum depth . . . . . 
. . %10"PRINTSIZET"u\n" +  " number of traverses . . . . %10"PRINTSIZET"u\n" +  " avg ops per traverse . . . . %10.2f\n" +  " allocated stack slices . . . %10"PRINTSIZET"u\n" +  " number of adds . . . . . . . %10"PRINTSIZET"u\n" +  " avg add rebalance count . . %10.2f\n" +  " number of deletes . . . . . %10"PRINTSIZET"u\n" +  " avg delete rebalance count . %10.2f\n", +  rb_num_finds, +  (double) rb_find_depth / rb_num_finds, +  rb_num_sidesteps, +  (double) rb_num_sidestep_ops / rb_num_sidesteps, +  rb_num_tracks, +  (double) rb_track_depth / rb_num_tracks, +  rb_num_sidetracks, +  (double) rb_num_sidetrack_ops / rb_num_sidetracks, +  rb_max_depth, +  rb_num_traverses, +  (double) rb_num_traverse_ops / rb_num_traverses, +  rbstack_slice_allocs, +  rb_num_adds, +  (double) rb_add_rebalance_cnt / rb_num_adds, +  rb_num_deletes, +  (double) rb_del_rebalance_cnt / rb_num_deletes); +  if (reset) reset_rb_stats(); + } +  + #endif /* RB_STATS */ +  + #if defined (PIKE_DEBUG) || defined (TEST_MULTISET) +  + void debug_dump_rb_tree (struct rb_node_hdr *root, dump_data_fn *dump_data, +  void *extra) + { +  if (root) { +  struct rb_node_hdr *node = root; +  struct rb_node_hdr *n, *n2;    struct rbstack_ptr p; -  +  size_t depth = 0;    RBSTACK_INIT (rbstack);    -  LOW_RB_TRAVERSE (1, rbstack, tree, { /* Push. */ +  LOW_RB_TRAVERSE ( +  1, rbstack, node, +  { /* Push. */ +  depth++;    p = rbstack;    RBSTACK_UP (p, n);    while (n) { -  if (n == tree) { -  fprintf (stderr, "[Circular! %p]", tree); +  if (n == node) { +  fprintf (stderr, "[Circular %p]", node);    goto skip_node;    }    RBSTACK_UP (p, n);    }    fputc ('(', stderr);    }, { /* prev is leaf. */ -  fprintf (stderr, "[%p]", tree->prev); +  p = rbstack; +  n2 = node; +  RBSTACK_UP (p, n); +  while (n && (n->flags & RB_THREAD_NEXT || n->next != n2)) { +  n2 = n; +  RBSTACK_UP (p, n); +  } +  if (node->prev != n) +  fprintf (stderr, "[Thread ptr is %p, expected %p] ", node->prev, n);    }, { /* prev is subtree. */ -  if (!tree->prev) { -  fputs ("[Zero subtree!]", stderr); +  if (!node->prev) { +  fputs ("[Zero subtree]", stderr);    goto between_1;    }    }, { /* Between nodes. */ -  fprintf (stderr, " %p/%c", -  tree, tree->flags & RB_RED ? 'R' : 'B'); +  if (!(node->flags & RB_THREAD_PREV)) +  fprintf (stderr, "\n%*s", depth, ""); +  if (node == root) fputs ("root=", stderr); +  fprintf (stderr, "%p/%c", +  node, node->flags & RB_RED ? 'R' : 'B');    if (dump_data) {    fputs (": ", stderr); -  dump_data (tree); +  dump_data (node, extra);    } -  else fputc (' ', stderr); +     }, { /* next is leaf. */ -  fprintf (stderr, "[%p]", tree->next); +  p = rbstack; +  n2 = node; +  RBSTACK_UP (p, n); +  while (n && (n->flags & RB_THREAD_PREV || n->prev != n2)) { +  n2 = n; +  RBSTACK_UP (p, n); +  } +  if (node->next != n) +  fprintf (stderr, " [Thread ptr is %p, expected %p]", node->next, n);    }, { /* next is subtree. */ -  if (!tree->next) { -  fputs ("[Zero subtree!]", stderr); +  if (!node->next) { +  fputs (" [Zero subtree]", stderr);    goto leave_1;    } -  +  fprintf (stderr, "\n%*s", depth, "");    }, { /* Pop. */    fputc (')', stderr); -  +  depth--;    skip_node:    ;    });    fputc ('\n', stderr);    }    else    fprintf (stderr, "(empty tree)\n");   }    - static void debug_rb_fatal (struct rb_node_hdr *tree, const char *fmt, ...) + /* If pos isn't zero, it points to a pointer in the stack that will be +  * marked in the output. 
*/ + void debug_dump_rbstack (struct rbstack_ptr rbstack, struct rbstack_ptr *pos)   { -  va_list args; -  va_start (args, fmt); -  (void) VFPRINTF (stderr, fmt, args); -  fputs ("Dumping tree: ", stderr); -  debug_dump_rb_tree (tree, 0); -  debug_fatal ("\r"); +  struct rb_node_hdr *node, *ptr_node = (void *) (ptrdiff_t) -1; +  RBSTACK_INIT (reversed); +  +  while (1) { +  int here = pos ? pos->ssp == rbstack.ssp && pos->slice == rbstack.slice : 0; +  RBSTACK_UP (rbstack, node); +  if (here) ptr_node = node; +  if (!node) break; +  RBSTACK_PUSH (reversed, node);    }    - static void debug_rb_ind_fatal (struct rb_node_ind *tree, const char *fmt, ...) +  if (pos && !ptr_node) fputs ("* ", stderr); +  +  while (1) { +  RBSTACK_POP (reversed, node); +  if (!node) break; +  fprintf (stderr, "%p ", node); +  if (pos && ptr_node == node) fputs ("* ", stderr); +  } +  +  if (pos && ptr_node == (void *) (ptrdiff_t) -1) +  fputs ("(pos outside stack)", stderr); +  fputc ('\n', stderr); + } +  + static void debug_rb_fatal (struct rb_node_hdr *tree, const char *fmt, ...)   {    va_list args;    va_start (args, fmt);    (void) VFPRINTF (stderr, fmt, args); -  fputs ("Dumping tree: ", stderr); -  debug_dump_rb_tree ((struct rb_node_hdr *) tree, -  (dump_data_fn *) debug_dump_ind_data); +  fputs ("Dumping tree:\n", stderr); +  debug_dump_rb_tree (tree, NULL, NULL);    debug_fatal ("\r");   }    - static void debug_rb_indval_fatal (struct rb_node_indval *tree, const char *fmt, ...) + static void debug_custom_rb_fatal (struct rb_node_hdr *tree, dump_data_fn *dump_data, +  void *extra, const char *fmt, ...)   {    va_list args;    va_start (args, fmt);    (void) VFPRINTF (stderr, fmt, args); -  fputs ("Dumping tree: ", stderr); -  debug_dump_rb_tree ((struct rb_node_hdr *) tree, -  (dump_data_fn *) debug_dump_indval_data); +  fputs ("Dumping tree:\n", stderr); +  debug_dump_rb_tree (tree, dump_data, extra);    debug_fatal ("\r");   }    - void debug_check_rb_tree (struct rb_node_hdr *tree) + void debug_check_rb_tree (struct rb_node_hdr *root, dump_data_fn *dump_data, void *extra)   { -  if (tree) { -  struct rb_node_hdr *node = tree, *n, *n2; +  if (root) { +  struct rb_node_hdr *node = root, *n, *n2;    struct rbstack_ptr p;    size_t blacks = 1, max_blacks = 0, depth = 0;    RBSTACK_INIT (rbstack);    -  if (tree->flags & RB_RED) rb_fatal (tree, "Root node not black.\n"); +  if (root->flags & RB_RED) custom_rb_fatal (root, dump_data, extra, +  "Root node not black.\n");    -  LOW_RB_TRAVERSE (1, rbstack, node, { /* Push. */ +  LOW_RB_TRAVERSE ( +  1, rbstack, node, +  { /* Push. */    depth++;    p = rbstack;    RBSTACK_UP (p, n);    while (n) { -  if (n == node) rb_fatal (tree, "Circular subtrees @ %p.\n", node); +  if (n == node) custom_rb_fatal (root, dump_data, extra, +  "Circular subtrees @ %p.\n", node);    RBSTACK_UP (p, n);    }       if (!(node->flags & RB_RED)) blacks++; -  }, -  { /* prev is leaf. */ +  }, { /* prev is leaf. 
*/    if (max_blacks) {    if (blacks != max_blacks) -  rb_fatal (tree, "Unbalanced tree - leftmost branch is %d, " +  custom_rb_fatal (root, dump_data, extra, +  "Unbalanced tree - leftmost branch is %d, "    "prev @ %p is %d.\n", max_blacks, node, blacks);    }    else max_blacks = blacks;       p = rbstack;    n2 = node;    RBSTACK_UP (p, n);    while (n && (n->flags & RB_THREAD_NEXT || n->next != n2)) {    n2 = n;    RBSTACK_UP (p, n);    }    if (node->prev != n) -  rb_fatal (tree, "Thread prev pointer @ %p is %p, expected %p.\n", +  custom_rb_fatal (root, dump_data, extra, +  "Thread prev pointer @ %p is %p, expected %p.\n",    node, node->prev, n); -  }, -  { /* prev is subtree. */ -  if (!node->prev) rb_fatal (tree, "Subtree prev is null @ %p.\n", node); +  }, { /* prev is subtree. */ +  if (!node->prev) custom_rb_fatal (root, dump_data, extra, +  "Subtree prev is null @ %p.\n", node);    if (node->flags & RB_RED && node->prev->flags & RB_RED) -  rb_fatal (tree, "Red node got red subtree prev node @ %p.\n", node); -  }, -  { /* Between nodes. */ -  }, -  { /* next is leaf. */ +  custom_rb_fatal (root, dump_data, extra, +  "Red node got red subtree prev node @ %p.\n", node); +  }, { /* Between nodes. */ +  }, { /* next is leaf. */    if (blacks != max_blacks) -  rb_fatal (tree, "Unbalanced tree - leftmost branch is %d, " +  custom_rb_fatal (root, dump_data, extra, +  "Unbalanced tree - leftmost branch is %d, "    "next @ %p is %d.\n", max_blacks, node, blacks);       p = rbstack;    n2 = node;    RBSTACK_UP (p, n);    while (n && (n->flags & RB_THREAD_PREV || n->prev != n2)) {    n2 = n;    RBSTACK_UP (p, n);    }    if (node->next != n) -  rb_fatal (tree, "Thread next pointer @ %p is %p, expected %p.\n", +  custom_rb_fatal (root, dump_data, extra, +  "Thread next pointer @ %p is %p, expected %p.\n",    node, node->next, n); -  }, -  { /* next is subtree. */ -  if (!node->next) rb_fatal (tree, "Subtree next is null @ %p.\n", node); +  }, { /* next is subtree. */ +  if (!node->next) custom_rb_fatal (root, dump_data, extra, +  "Subtree next is null @ %p.\n", node);    if (node->flags & RB_RED && node->next->flags & RB_RED) -  rb_fatal (tree, "Red node got red subtree next node @ %p.\n", node); -  }, -  { /* Pop. */ +  custom_rb_fatal (root, dump_data, extra, +  "Red node got red subtree next node @ %p.\n", node); +  }, { /* Pop. 
*/    if (!(node->flags & RB_RED)) blacks--;    depth--;    });    }   }    - #ifdef TEST_RBTREE -  - #define TEST_I_FIND(fn, exp) \ - do { \ -  i_node = PIKE_CONCAT (rb_ind_, fn) (i_tree, sp - 1, 0); \ -  if (!i_node) \ -  rb_ind_fatal (i_tree, #fn " failed to find %d (%d).\n", exp, i); \ -  if (i_node->ind.u.integer != exp) \ -  rb_ind_fatal (i_tree, #fn " failed to find %d - got %d instead (%d).\n", \ -  exp, i_node->ind.u.integer, i); \ -  i_node = PIKE_CONCAT (rb_ind_, fn) (i_tree, sp - 1, less_efun); \ -  if (!i_node) \ -  rb_ind_fatal (i_tree, #fn " failed to find %d with cmp_less (%d).\n", \ -  exp, i); \ -  if (i_node->ind.u.integer != exp) \ -  rb_ind_fatal (i_tree, #fn " failed to find %d with cmp_less - " \ -  "got %d instead (%d).\n", exp, i_node->ind.u.integer, i); \ - } while (0) -  - #define TEST_I_NOT_FIND(fn) \ - do { \ -  i_node = PIKE_CONCAT (rb_ind_, fn) (i_tree, sp - 1, 0); \ -  if (i_node) \ -  rb_ind_fatal (i_tree, #fn " failed to not find %d - got %d (%d).\n", \ -  sp[-1].u.integer, i_node->ind.u.integer, i); \ -  i_node = PIKE_CONCAT (rb_ind_, fn) (i_tree, sp - 1, less_efun); \ -  if (i_node) \ -  rb_ind_fatal (i_tree, #fn " failed to not find %d with cmp_less - " \ -  "got %d (%d).\n", \ -  sp[-1].u.integer, i_node->ind.u.integer, i); \ - } while (0) -  - #define TEST_I_STEP_FIND(fn, dir, exp) \ - do { \ -  i_node = PIKE_CONCAT (rb_ind_, dir) (i_node); \ -  if (!i_node) \ -  rb_ind_fatal (i_tree, "Failed to step " #dir " to %d after " #fn \ -  " of %d (%d).\n", exp, sp[-1].u.integer, i); \ -  if (i_node->ind.u.integer != exp) \ -  rb_ind_fatal (i_tree, "Failed to step " #dir " to %d after " #fn \ -  " of %d - got %d instead (%d).\n", \ -  exp, sp[-1].u.integer, i_node->ind.u.integer, i); \ - } while (0) -  - #define TEST_I_STEP_NOT_FIND(fn, dir) \ - do { \ -  i_node = PIKE_CONCAT (rb_ind_, dir) (i_node); \ -  if (i_node) \ -  rb_ind_fatal (i_tree, "Failed to step " #dir " to end after " #fn \ -  " of %d - got %d (%d).\n", \ -  sp[-1].u.integer, i_node->ind.u.integer, i); \ - } while (0) -  - #define TEST_IV_FIND(fn, exp) \ - do { \ -  iv_node = PIKE_CONCAT (rb_indval_, fn) (iv_tree, sp - 1, 0); \ -  if (!iv_node) \ -  rb_indval_fatal (iv_tree, #fn " failed to find %d (%d).\n", exp, i); \ -  if (iv_node->ind.u.integer != exp) \ -  rb_indval_fatal (iv_tree, #fn " failed to find %d - " \ -  "got %d instead (%d).\n", exp, iv_node->ind.u.integer, i); \ -  iv_node = PIKE_CONCAT (rb_indval_, fn) (iv_tree, sp - 1, less_efun); \ -  if (!iv_node) \ -  rb_indval_fatal (iv_tree, #fn " failed to find %d with cmp_less (%d).\n", \ -  exp, i); \ -  if (iv_node->ind.u.integer != exp) \ -  rb_indval_fatal (iv_tree, #fn " failed to find %d with cmp_less - " \ -  "got %d instead (%d).\n", exp, iv_node->ind.u.integer, i); \ - } while (0) -  - #define TEST_IV_NOT_FIND(fn) \ - do { \ -  iv_node = PIKE_CONCAT (rb_indval_, fn) (iv_tree, sp - 1, 0); \ -  if (iv_node) \ -  rb_indval_fatal (iv_tree, #fn " failed to not find %d - got %d (%d).\n", \ -  sp[-1].u.integer, iv_node->ind.u.integer, i); \ -  iv_node = PIKE_CONCAT (rb_indval_, fn) (iv_tree, sp - 1, less_efun); \ -  if (iv_node) \ -  rb_indval_fatal (iv_tree, #fn " failed to not find %d with cmp_less - " \ -  "got %d (%d).\n", \ -  sp[-1].u.integer, iv_node->ind.u.integer, i); \ - } while (0) -  - #define TEST_IV_STEP_FIND(fn, dir, exp) \ - do { \ -  iv_node = PIKE_CONCAT (rb_indval_, dir) (iv_node); \ -  if (!iv_node) \ -  rb_indval_fatal (iv_tree, "Failed to step " #dir " to %d after " #fn \ -  " of %d (%d).\n", exp, 
sp[-1].u.integer, i); \ -  if (iv_node->ind.u.integer != exp) \ -  rb_indval_fatal (iv_tree, "Failed to step " #dir " to %d after " #fn \ -  " of %d - got %d instead (%d).\n", \ -  exp, sp[-1].u.integer, iv_node->ind.u.integer, i); \ - } while (0) -  - #define TEST_IV_STEP_NOT_FIND(fn, dir) \ - do { \ -  iv_node = PIKE_CONCAT (rb_indval_, dir) (iv_node); \ -  if (iv_node) \ -  rb_indval_fatal (iv_tree, "Failed to step " #dir " to end after " #fn \ -  " of %d - got %d (%d).\n", \ -  sp[-1].u.integer, iv_node->ind.u.integer, i); \ - } while (0) -  - void test_rbtree() + void debug_check_rbstack (struct rb_node_hdr *root, struct rbstack_ptr rbstack)   { -  size_t i; -  struct svalue *less_efun; -  struct array *a; -  struct rb_node_ind *i_tree = 0, *i_node; -  struct rb_node_indval *iv_tree = 0, *iv_node; +  struct rbstack_ptr rbstack_top = rbstack; +  struct rb_node_hdr *snode, *tnode = root; +  RBSTACK_INIT (reversed);    -  push_svalue (simple_mapping_string_lookup (get_builtin_constants(), "`<")); -  less_efun = sp - 1; -  -  push_int (1); -  push_int (1); -  push_int (2); -  push_int (4); -  push_int (5); -  push_int (5); -  push_int (7); -  push_int (8); -  push_int (11); -  push_int (14); -  push_int (15); -  push_int (15); -  f_aggregate (12); -  -  for (i = 1*2*3*4*5*6*7*8*9; i > 0; i--) { -  int v; -  size_t j; -  -  if (!(i % 1000)) fprintf (stderr, "i %d \r", i); -  -  stack_dup(); -  push_int (i); -  f_permute (2); -  a = sp[-1].u.array; -  -  for (j = 0; j < 12; j++) { -  rb_ind_insert (&i_tree, &a->item[j], 0); -  debug_check_rb_tree (HDR (i_tree)); +  while (1) { +  RBSTACK_UP (rbstack, snode); +  if (!snode) break; +  RBSTACK_PUSH (reversed, snode);    }    -  for (j = 0, v = 0, i_node = rb_ind_first (i_tree); i_node; -  i_node = rb_ind_next (i_node), j++) { -  push_rb_node_ind (i_node); -  if (v >= sp[-1].u.integer) rb_ind_fatal (i_tree, "Failed to sort (%d).\n", i); -  v = sp[-1].u.integer; -  pop_stack(); +  RBSTACK_POP (reversed, snode); +  if (!snode) return; +  if (snode != tnode) { +  fputs ("rbstack: ", stderr); +  debug_dump_rbstack (rbstack_top, NULL); +  rb_fatal (root, "rbstack root %p != tree root.\n", snode);    } -  if (j != 9) rb_ind_fatal (i_tree, "Size of tree is wrong: %d (%d)\n", j, i); +     -  push_int (5); -  TEST_I_FIND (find_eq, 5); -  TEST_I_FIND (find_lt, 4); -  TEST_I_FIND (find_gt, 7); -  TEST_I_FIND (find_le, 5); -  TEST_I_FIND (find_ge, 5); -  pop_stack(); -  -  push_int (6); -  TEST_I_NOT_FIND (find_eq); -  TEST_I_FIND (find_lt, 5); -  TEST_I_FIND (find_gt, 7); -  TEST_I_FIND (find_le, 5); -  TEST_I_FIND (find_ge, 7); -  pop_stack(); -  -  push_int (0); -  TEST_I_NOT_FIND (find_eq); -  TEST_I_NOT_FIND (find_lt); -  TEST_I_FIND (find_gt, 1); -  TEST_I_NOT_FIND (find_le); -  TEST_I_FIND (find_ge, 1); -  pop_stack(); -  -  push_int (1); -  TEST_I_FIND (find_eq, 1); -  TEST_I_NOT_FIND (find_lt); -  TEST_I_FIND (find_gt, 2); -  TEST_I_FIND (find_le, 1); -  TEST_I_FIND (find_ge, 1); -  pop_stack(); -  -  push_int (15); -  TEST_I_FIND (find_eq, 15); -  TEST_I_FIND (find_lt, 14); -  TEST_I_NOT_FIND (find_gt); -  TEST_I_FIND (find_le, 15); -  TEST_I_FIND (find_ge, 15); -  pop_stack(); -  -  push_int (17); -  TEST_I_NOT_FIND (find_eq); -  TEST_I_FIND (find_lt, 15); -  TEST_I_NOT_FIND (find_gt); -  TEST_I_FIND (find_le, 15); -  TEST_I_NOT_FIND (find_ge); -  pop_stack(); -  -  i_node = rb_ind_copy (i_tree); -  debug_check_rb_tree (HDR (i_node)); -  for (j = 0, v = 0; j < 12; j++) { -  v += !!rb_ind_delete (&i_node, &a->item[j], 0); -  debug_check_rb_tree 
(HDR (i_node)); +  while (1) { +  RBSTACK_POP (reversed, snode); +  if (!snode) break; +  if (!(tnode->flags & RB_THREAD_PREV) && snode == tnode->prev) +  {tnode = snode; continue;} +  if (!(tnode->flags & RB_THREAD_NEXT) && snode == tnode->next) +  {tnode = snode; continue;} +  rb_fatal (root, "rbstack differs from tree: Node %p got no child %p.\n", +  tnode, snode);    } -  if (v != 9 || i_node) -  rb_ind_fatal (i_node, "rb_ind_delete deleted " -  "wrong number of entries: %d (%d)\n", v, i); -  -  rb_ind_free (i_tree), i_tree = 0; -  pop_stack(); +    } -  pop_stack(); +     -  push_int (1); -  push_int (1); -  push_int (4); -  push_int (5); -  push_int (5); -  push_int (7); -  push_int (15); -  push_int (15); -  f_aggregate (8); -  -  for (i = 1*2*3*4*5*6*7*8; i > 0; i--) { -  int v; -  size_t j; -  -  if (!(i % 1000)) fprintf (stderr, "iv %d \r", i); -  -  stack_dup(); -  push_int (i); -  f_permute (2); -  a = sp[-1].u.array; -  -  { -  struct rb_node_indval *nodes[8]; -  push_int (17); -  -  for (j = 0; j < 8; j++) { -  for (iv_node = rb_indval_last (iv_tree); -  iv_node; iv_node = rb_indval_prev (iv_node)) { -  if (iv_node->val.u.integer <= a->item[j].u.integer) break; -  } -  nodes[j] = rb_indval_add_after (&iv_tree, iv_node, sp - 1, &a->item[j], 0); -  debug_check_rb_tree (HDR (iv_tree)); -  } -  -  for (j = 0, v = 0, iv_node = rb_indval_first (iv_tree); -  iv_node; iv_node = rb_indval_next (iv_node), j++) { -  if (v > iv_node->val.u.integer) -  rb_indval_fatal (iv_tree, "Failed to add in order (%d).\n", i); -  v = iv_node->val.u.integer; -  } -  if (j != 8) rb_indval_fatal (iv_tree, "Size of tree is wrong: %d (%d)\n", j, i); -  -  for (j = 0; j < 8; j++) { -  iv_node = rb_indval_delete_node (&iv_tree, nodes[j], 0); -  if (iv_node != nodes[j]) { -  for (v = j + 1;; v++) { -  if (v >= 8) rb_indval_fatal (iv_tree, "rb_indval_delete_node " -  "returned a bogus value.\n"); -  if (iv_node == nodes[v]) break; -  } -  nodes[v] = nodes[j]; -  } -  debug_check_rb_tree (HDR (iv_tree)); -  } -  if (iv_tree) -  rb_indval_fatal (iv_node, "rb_indval_delete_node didn't delete " -  "the whole tree (%d)\n", i); -  -  pop_stack(); -  } -  -  for (j = 0; j < 8; j++) { -  rb_indval_add (&iv_tree, &a->item[j], &a->item[j], 0); -  debug_check_rb_tree (HDR (iv_tree)); -  } -  -  for (j = 0, v = 0, iv_node = rb_indval_first (iv_tree); -  iv_node; iv_node = rb_indval_next (iv_node), j++) { -  push_rb_node_ind (iv_node); -  if (v > sp[-1].u.integer) rb_indval_fatal (iv_tree, "Failed to sort (%d).\n", i); -  v = sp[-1].u.integer; -  pop_stack(); -  } -  if (j != 8) rb_indval_fatal (iv_tree, "Size of tree is wrong: %d (%d)\n", j, i); -  -  push_int (5); -  TEST_IV_FIND (find_eq, 5); -  TEST_IV_FIND (find_lt, 4); -  TEST_IV_FIND (find_gt, 7); -  TEST_IV_FIND (find_le, 5); -  TEST_IV_STEP_FIND (find_le, next, 7); -  TEST_IV_FIND (find_ge, 5); -  TEST_IV_STEP_FIND (find_ge, prev, 4); -  pop_stack(); -  -  push_int (6); -  TEST_IV_NOT_FIND (find_eq); -  TEST_IV_FIND (find_lt, 5); -  TEST_IV_FIND (find_gt, 7); -  TEST_IV_FIND (find_le, 5); -  TEST_IV_STEP_FIND (find_le, next, 7); -  TEST_IV_FIND (find_ge, 7); -  TEST_IV_STEP_FIND (find_ge, prev, 5); -  pop_stack(); -  -  push_int (0); -  TEST_IV_NOT_FIND (find_eq); -  TEST_IV_NOT_FIND (find_lt); -  TEST_IV_FIND (find_gt, 1); -  TEST_IV_STEP_NOT_FIND (find_gt, prev); -  TEST_IV_NOT_FIND (find_le); -  TEST_IV_FIND (find_ge, 1); -  TEST_IV_STEP_FIND (find_ge, next, 1); -  pop_stack(); -  -  push_int (1); -  TEST_IV_FIND (find_eq, 1); -  TEST_IV_NOT_FIND (find_lt); -  
TEST_IV_FIND (find_gt, 4); -  TEST_IV_FIND (find_le, 1); -  TEST_IV_STEP_FIND (find_le, next, 4); -  TEST_IV_FIND (find_ge, 1); -  TEST_IV_STEP_NOT_FIND (find_ge, prev); -  pop_stack(); -  -  push_int (15); -  TEST_IV_FIND (find_eq, 15); -  TEST_IV_FIND (find_lt, 7); -  TEST_IV_NOT_FIND (find_gt); -  TEST_IV_FIND (find_le, 15); -  TEST_IV_STEP_NOT_FIND (find_le, next); -  TEST_IV_FIND (find_ge, 15); -  TEST_IV_STEP_FIND (find_ge, prev, 7); -  pop_stack(); -  -  push_int (17); -  TEST_IV_NOT_FIND (find_eq); -  TEST_IV_FIND (find_lt, 15); -  TEST_IV_STEP_NOT_FIND (find_lt, next); -  TEST_IV_NOT_FIND (find_gt); -  TEST_IV_FIND (find_le, 15); -  TEST_IV_STEP_FIND (find_le, prev, 15); -  TEST_IV_NOT_FIND (find_ge); -  pop_stack(); -  -  iv_node = rb_indval_copy (iv_tree); -  debug_check_rb_tree (HDR (iv_node)); -  for (j = 0, v = 0; j < 8; j++) { -  v += !!rb_indval_delete (&iv_node, &a->item[j], 0); -  debug_check_rb_tree (HDR (iv_node)); -  } -  if (v != 8 || iv_node) -  rb_indval_fatal (iv_node, "rb_indval_delete deleted " -  "wrong number of entries: %d (%d)\n", v, i); -  -  for (v = 0; iv_tree; v++) { -  rb_indval_delete_node (&iv_tree, iv_tree, 0); -  debug_check_rb_tree (HDR (iv_tree)); -  } -  if (v != 8) -  rb_indval_fatal (iv_node, "rb_indval_delete_node deleted " -  "wrong number of entries: %d (%d)\n", v, i); -  -  pop_stack(); -  } -  pop_stack(); -  -  pop_stack(); -  fprintf (stderr, " \r"); - } -  - #endif /* TEST_RBTREE */ -  - #endif /* PIKE_DEBUG */ + #endif /* PIKE_DEBUG || TEST_MULTISET */
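
A minimal sketch of the removal convention used by rb_remove_with_move, inferred from the call sites in this diff (illustrative only: struct my_node, my_find_fn, the int key and plain malloc/free allocation are hypothetical, and the find-callback prototype is an assumption based on how find_fn is invoked above). Since unlinking "with move" may relocate another node's contents into the matched slot, the block to release is the one the function returns, not necessarily the node that matched the key.

#include <stdlib.h>
#include "rbtree.h"   /* Assumed public header for these declarations. */

struct my_node {
  struct rb_node_hdr h;   /* Header assumed to sit first in the node. */
  int value;
};

/* Assumed callback shape: negative/zero/positive for key less than,
 * equal to, or greater than the node, as the comparisons above imply. */
static int my_find_fn (void *key, struct rb_node_hdr *node)
{
  int k = *(int *) key, v = ((struct my_node *) node)->value;
  return k < v ? -1 : k > v ? 1 : 0;
}

static void my_remove (struct rb_node_hdr **root, int key)
{
  struct rb_node_hdr *doomed =
    rb_remove_with_move (root, my_find_fn, &key, sizeof (struct my_node),
                         NULL, NULL);   /* No content cleanup callback. */
  if (doomed) free (doomed);            /* Free what the call says to free. */
}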
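
The rb_find_* family, which this revision appears to guard with ENABLE_UNUSED_RB_FUNCTIONS, differs only in how near misses are treated. A small sketch, reusing the hypothetical my_node and my_find_fn from the previous sketch:

/* eq: exact match or NULL; lt/gt: nearest strictly smaller/greater node;
 * le/ge: the match if present, otherwise the nearest smaller/greater node. */
static void find_neighbours (struct rb_node_hdr *root, int key)
{
  struct rb_node_hdr *eq = rb_find_eq (root, my_find_fn, &key);
  struct rb_node_hdr *lt = rb_find_lt (root, my_find_fn, &key);
  struct rb_node_hdr *gt = rb_find_gt (root, my_find_fn, &key);
  struct rb_node_hdr *le = rb_find_le (root, my_find_fn, &key);
  struct rb_node_hdr *ge = rb_find_ge (root, my_find_fn, &key);
  /* Each call returns NULL when no such node exists. */
  (void) eq; (void) lt; (void) gt; (void) le; (void) ge;
}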
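
rb_make_list and rb_make_tree together give a linear-time rebalance: the first flattens a tree into a list chained through the next pointers, the second rebuilds a perfectly balanced tree from such a list given its length. A sketch of that round trip (illustrative only; rb_sizeof and rb_get_nth are used as declared above):

static struct rb_node_hdr *rebalance (struct rb_node_hdr *root)
{
  size_t len = rb_sizeof (root);          /* Count nodes while still a tree. */
  struct rb_node_hdr *list = rb_make_list (root);
  root = rb_make_tree (list, len);        /* O(len) rebuild. */
  /* Nodes can now be picked by rank, zero-based:
   * rb_get_nth (root, 0) is the same node as rb_first (root). */
  return root;
}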
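
The rb_linear_merge comment above describes a next-linked list plus its length, meant to be handed on to rb_make_tree. A sketch of that pipeline for a destructive union, again under ENABLE_UNUSED_RB_FUNCTIONS (illustrative only: the callbacks reuse the hypothetical my_node, their prototypes are inferred from the call sites, and PIKE_ARRAY_OP_OR is assumed to be the union operation from array.c that the comment refers to):

#include <stdlib.h>

/* Assumed callback shapes, inferred from how rb_linear_merge calls them. */
static int my_cmp (struct rb_node_hdr *a, struct rb_node_hdr *b, void *extra)
{
  int va = ((struct my_node *) a)->value, vb = ((struct my_node *) b)->value;
  return va < vb ? -1 : va > vb ? 1 : 0;
}

static struct rb_node_hdr *my_merge_copy (struct rb_node_hdr *node,
                                          void *extra, int which_tree)
{
  struct my_node *copy = malloc (sizeof (struct my_node));
  if (!copy) abort ();                  /* Sketch-level error handling. */
  *copy = *(struct my_node *) node;     /* which_tree is MERGE_TREE_A or _B. */
  return &copy->h;
}

static void my_merge_free (struct rb_node_hdr *node, void *extra, int which_tree)
{
  free (node);                          /* Also sees MERGE_TREE_RES on error unwind. */
}

/* Destructive union: both input trees are consumed into the result. */
static struct rb_node_hdr *merge_union (struct rb_node_hdr *a, struct rb_node_hdr *b)
{
  size_t len;
  struct rb_node_hdr *list =
    rb_linear_merge (a, b,
                     PIKE_ARRAY_OP_OR | PIKE_MERGE_DESTR_A | PIKE_MERGE_DESTR_B,
                     my_cmp, NULL,
                     my_merge_copy, NULL,   /* Unused here since both trees are consumed. */
                     my_merge_free, NULL,
                     &len);
  return rb_make_tree (list, len);
}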