]> git.saurik.com Git - apple/xnu.git/blob - bsd/hfs/hfscommon/headers/RedBlackTree.h
xnu-1699.22.73.tar.gz
[apple/xnu.git] / bsd / hfs / hfscommon / headers / RedBlackTree.h
1 /*
2 * Copyright (c) 2009 Apple Inc. All rights reserved.
3 *
4 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. The rights granted to you under the License
10 * may not be used to create, or enable the creation or redistribution of,
11 * unlawful or unlicensed copies of an Apple operating system, or to
12 * circumvent, violate, or enable the circumvention or violation of, any
13 * terms of an Apple operating system software license agreement.
14 *
15 * Please obtain a copy of the License at
16 * http://www.opensource.apple.com/apsl/ and read it before using this file.
17 *
18 * The Original Code and all software distributed under the License are
19 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
20 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23 * Please see the License for the specific language governing rights and
24 * limitations under the License.
25 *
26 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
27 */
28
29 /******************************************************************************
30 *
31 * Copyright (C) 2008 Jason Evans <jasone@FreeBSD.org>.
32 * All rights reserved.
33 *
34 * Redistribution and use in source and binary forms, with or without
35 * modification, are permitted provided that the following conditions
36 * are met:
37 * 1. Redistributions of source code must retain the above copyright
38 * notice(s), this list of conditions and the following disclaimer
39 * unmodified other than the allowable addition of one or more
40 * copyright notices.
41 * 2. Redistributions in binary form must reproduce the above copyright
42 * notice(s), this list of conditions and the following disclaimer in
43 * the documentation and/or other materials provided with the
44 * distribution.
45 *
46 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY
47 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
48 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
49 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) BE
50 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
51 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
52 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
53 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
54 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
55 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
56 * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
57 *
58 ******************************************************************************
59 *
60 * cpp macro implementation of left-leaning red-black trees.
61 *
62 * Usage:
63 *
64 * (Optional, see assert(3).)
65 * #define NDEBUG
66 *
67 * (Required.)
68 * #include <assert.h>
69 * #include <rb.h>
70 * ...
71 *
72 * All operations are done non-recursively. Parent pointers are not used, and
73 * color bits are stored in the least significant bit of right-child pointers,
74 * thus making node linkage as compact as is possible for red-black trees.
75 *
76 * Some macros use a comparison function pointer, which is expected to have the
77 * following prototype:
78 *
 *   int (*a_cmp)(a_type *a_node, a_type *a_other);
 *                        ^^^^^^
 *                     or a_key
82 *
 * Interpretation of comparison function return values:
84 *
85 * -1 : a_node < a_other
86 * 0 : a_node == a_other
87 * 1 : a_node > a_other
88 *
89 * In all cases, the a_node or a_key macro argument is the first argument to the
90 * comparison function, which makes it possible to write comparison functions
91 * that treat the first argument specially.
92 *
93 ******************************************************************************/
94
95 #ifndef RB_H_
96 #define RB_H_
97
/*
 * Node linkage to embed in the client's node type.
 *
 * With RB_COMPACT defined, the node's color is packed into bit 0 of the
 * right-child pointer (see rbp_red_get/rbp_right_get below), so nodes
 * need no separate color field; this requires node addresses to have
 * bit 0 clear (i.e. at least 2-byte alignment).  Without RB_COMPACT, a
 * separate bool carries the color.
 */
#define RB_COMPACT
#ifdef RB_COMPACT
/* Node structure. */
#define rb_node(a_type)							\
struct {								\
    a_type *rbn_left;							\
    a_type *rbn_right_red;						\
}
#else
#define rb_node(a_type)							\
struct {								\
    a_type *rbn_left;							\
    a_type *rbn_right;							\
    bool rbn_red;							\
}
#endif
114
115 /* Root structure. */
/*
 * Root structure.  rbt_nil is an embedded sentinel node used in place
 * of NULL child pointers; rb_new() initializes it black, and all
 * traversal/update macros compare against &tree->rbt_nil rather than
 * NULL.
 */
#define rb_tree(a_type)							\
struct {								\
    a_type *rbt_root;							\
    a_type rbt_nil;							\
}
121
122 /* Left accessors. */
/* Left accessors. */
#define rbp_left_get(a_type, a_field, a_node)				\
    ((a_node)->a_field.rbn_left)
#define rbp_left_set(a_type, a_field, a_node, a_left) do {		\
    (a_node)->a_field.rbn_left = a_left;				\
} while (0)

#ifdef RB_COMPACT
/*
 * Right accessors (compact layout).  The right-child pointer and the
 * node color share the rbn_right_red word: bit 0 is the color
 * (1 == red), the remaining bits are the pointer.  Masking with
 * (ssize_t)-2 (all ones except bit 0) strips the color bit to recover
 * the pointer; masking with 1 extracts the color.
 */
#define rbp_right_get(a_type, a_field, a_node)				\
    ((a_type *) (((intptr_t) (a_node)->a_field.rbn_right_red)		\
      & ((ssize_t)-2)))
#define rbp_right_set(a_type, a_field, a_node, a_right) do {		\
    /* Replace the pointer bits while preserving the color bit. */	\
    (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) a_right)	\
      | (((uintptr_t) (a_node)->a_field.rbn_right_red) & ((size_t)1))); \
} while (0)

/* Color accessors. */
#define rbp_red_get(a_type, a_field, a_node)				\
    ((bool) (((uintptr_t) (a_node)->a_field.rbn_right_red)		\
      & ((size_t)1)))
#define rbp_color_set(a_type, a_field, a_node, a_red) do {		\
    /* a_red is a bool (0/1), so OR-ing it in sets only bit 0. */	\
    (a_node)->a_field.rbn_right_red = (a_type *) ((((intptr_t)		\
      (a_node)->a_field.rbn_right_red) & ((ssize_t)-2))			\
      | ((ssize_t)a_red));						\
} while (0)
#define rbp_red_set(a_type, a_field, a_node) do {			\
    (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t)		\
      (a_node)->a_field.rbn_right_red) | ((size_t)1));			\
} while (0)
#define rbp_black_set(a_type, a_field, a_node) do {			\
    (a_node)->a_field.rbn_right_red = (a_type *) (((intptr_t)		\
      (a_node)->a_field.rbn_right_red) & ((ssize_t)-2));		\
} while (0)
#else
/* Right accessors (separate color field). */
#define rbp_right_get(a_type, a_field, a_node)				\
    ((a_node)->a_field.rbn_right)
#define rbp_right_set(a_type, a_field, a_node, a_right) do {		\
    (a_node)->a_field.rbn_right = a_right;				\
} while (0)

/* Color accessors. */
#define rbp_red_get(a_type, a_field, a_node)				\
    ((a_node)->a_field.rbn_red)
#define rbp_color_set(a_type, a_field, a_node, a_red) do {		\
    (a_node)->a_field.rbn_red = (a_red);				\
} while (0)
#define rbp_red_set(a_type, a_field, a_node) do {			\
    (a_node)->a_field.rbn_red = true;					\
} while (0)
#define rbp_black_set(a_type, a_field, a_node) do {			\
    (a_node)->a_field.rbn_red = false;					\
} while (0)
#endif
177
178 /* Node initializer. */
/*
 * Node initializer: both children point at the tree's nil sentinel,
 * and the node is colored red (the correct initial color for a
 * freshly inserted node; rb_insert recolors as needed).
 */
#define rbp_node_new(a_type, a_field, a_tree, a_node) do {		\
    rbp_left_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil);	\
    rbp_right_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil);	\
    rbp_red_set(a_type, a_field, (a_node));				\
} while (0)
184
185 /* Tree initializer. */
/*
 * Tree initializer: empty tree (root points at the nil sentinel), with
 * the sentinel itself initialized and forced black so that color tests
 * against it behave like tests against a black leaf.
 */
#define rb_new(a_type, a_field, a_tree) do {				\
    (a_tree)->rbt_root = &(a_tree)->rbt_nil;				\
    rbp_node_new(a_type, a_field, a_tree, &(a_tree)->rbt_nil);		\
    rbp_black_set(a_type, a_field, &(a_tree)->rbt_nil);			\
} while (0)
191
192 /* Tree operations. */
/*
 * Store the tree's black height in r_height by walking the left spine
 * from the root and counting black nodes.  Every root-to-leaf path has
 * the same black count, so the left spine suffices.
 */
#define rbp_black_height(a_type, a_field, a_tree, r_height) do {	\
    a_type *rbp_bh_node = (a_tree)->rbt_root;				\
    (r_height) = 0;							\
    while (rbp_bh_node != &(a_tree)->rbt_nil) {				\
	if (!rbp_red_get(a_type, a_field, rbp_bh_node)) {		\
	    (r_height)++;						\
	}								\
	rbp_bh_node = rbp_left_get(a_type, a_field, rbp_bh_node);	\
    }									\
} while (0)
203
/* Leftmost (minimum) node of the subtree rooted at a_root. */
#define rbp_first(a_type, a_field, a_tree, a_root, r_node) do {		\
    (r_node) = (a_root);						\
    while (rbp_left_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil) { \
	(r_node) = rbp_left_get(a_type, a_field, (r_node));		\
    }									\
} while (0)
210
/* Rightmost (maximum) node of the subtree rooted at a_root. */
#define rbp_last(a_type, a_field, a_tree, a_root, r_node) do {		\
    (r_node) = (a_root);						\
    while (rbp_right_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil) { \
	(r_node) = rbp_right_get(a_type, a_field, (r_node));		\
    }									\
} while (0)
217
/*
 * In-order successor of a_node, stored in r_node.  If a_node has a
 * right subtree, the successor is that subtree's leftmost node.
 * Otherwise, since nodes carry no parent pointers, re-search from the
 * root: the successor is the last node from which the search descended
 * left.  Yields &tree->rbt_nil when a_node is the tree's maximum.
 * a_node must be present in the tree (the search asserts it is found).
 */
#define rbp_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do {	\
    if (rbp_right_get(a_type, a_field, (a_node)) != &(a_tree)->rbt_nil) { \
	rbp_first(a_type, a_field, a_tree, rbp_right_get(a_type,	\
	  a_field, (a_node)), (r_node));				\
    } else {								\
	a_type *rbp_n_t = (a_tree)->rbt_root;				\
	assert(rbp_n_t != &(a_tree)->rbt_nil);				\
	(r_node) = &(a_tree)->rbt_nil;					\
	while (true) {							\
	    int rbp_n_cmp = (a_cmp)((a_node), rbp_n_t);			\
	    if (rbp_n_cmp < 0) {					\
		/* Descending left: rbp_n_t is the best successor so far. */ \
		(r_node) = rbp_n_t;					\
		rbp_n_t = rbp_left_get(a_type, a_field, rbp_n_t);	\
	    } else if (rbp_n_cmp > 0) {					\
		rbp_n_t = rbp_right_get(a_type, a_field, rbp_n_t);	\
	    } else {							\
		break;							\
	    }								\
	    assert(rbp_n_t != &(a_tree)->rbt_nil);			\
	}								\
    }									\
} while (0)
240
/*
 * In-order predecessor of a_node, stored in r_node; mirror image of
 * rbp_next.  If a_node has a left subtree, the predecessor is that
 * subtree's rightmost node; otherwise re-search from the root and
 * remember the last node from which the search descended right.
 * Yields &tree->rbt_nil when a_node is the tree's minimum.
 */
#define rbp_prev(a_type, a_field, a_cmp, a_tree, a_node, r_node) do {	\
    if (rbp_left_get(a_type, a_field, (a_node)) != &(a_tree)->rbt_nil) {\
	rbp_last(a_type, a_field, a_tree, rbp_left_get(a_type,		\
	  a_field, (a_node)), (r_node));				\
    } else {								\
	a_type *rbp_p_t = (a_tree)->rbt_root;				\
	assert(rbp_p_t != &(a_tree)->rbt_nil);				\
	(r_node) = &(a_tree)->rbt_nil;					\
	while (true) {							\
	    int rbp_p_cmp = (a_cmp)((a_node), rbp_p_t);			\
	    if (rbp_p_cmp < 0) {					\
		rbp_p_t = rbp_left_get(a_type, a_field, rbp_p_t);	\
	    } else if (rbp_p_cmp > 0) {					\
		/* Descending right: rbp_p_t is the best predecessor so far. */ \
		(r_node) = rbp_p_t;					\
		rbp_p_t = rbp_right_get(a_type, a_field, rbp_p_t);	\
	    } else {							\
		break;							\
	    }								\
	    assert(rbp_p_t != &(a_tree)->rbt_nil);			\
	}								\
    }									\
} while (0)
263
/* Smallest node in the tree, or NULL if the tree is empty. */
#define rb_first(a_type, a_field, a_tree, r_node) do {			\
    rbp_first(a_type, a_field, a_tree, (a_tree)->rbt_root, (r_node));	\
    (r_node) = ((r_node) != &(a_tree)->rbt_nil) ? (r_node) : NULL;	\
} while (0)
270
/*
 * Largest node in the tree, or NULL if the tree is empty.
 *
 * Fix: the r_node macro argument is now parenthesized when forwarded to
 * rbp_last(), matching rb_first(); the original passed it bare, so a
 * non-trivial expression argument (e.g. one containing a comma or a
 * low-precedence operator) could expand incorrectly inside rbp_last.
 */
#define rb_last(a_type, a_field, a_tree, r_node) do {			\
    rbp_last(a_type, a_field, a_tree, (a_tree)->rbt_root, (r_node));	\
    if ((r_node) == &(a_tree)->rbt_nil) {				\
	(r_node) = NULL;						\
    }									\
} while (0)
277
/* Successor of a_node, or NULL if a_node is the tree's maximum. */
#define rb_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do {	\
    rbp_next(a_type, a_field, a_cmp, a_tree, (a_node), (r_node));	\
    (r_node) = ((r_node) != &(a_tree)->rbt_nil) ? (r_node) : NULL;	\
} while (0)
284
/* Predecessor of a_node, or NULL if a_node is the tree's minimum. */
#define rb_prev(a_type, a_field, a_cmp, a_tree, a_node, r_node) do {	\
    rbp_prev(a_type, a_field, a_cmp, a_tree, (a_node), (r_node));	\
    (r_node) = ((r_node) != &(a_tree)->rbt_nil) ? (r_node) : NULL;	\
} while (0)
291
/* Exact-match search: set r_node to the node comparing equal to a_key,
 * or NULL if no such node exists. */
#define rb_search(a_type, a_field, a_cmp, a_tree, a_key, r_node) do {	\
    (r_node) = (a_tree)->rbt_root;					\
    while ((r_node) != &(a_tree)->rbt_nil) {				\
	int rbp_se_cmp = (a_cmp)((a_key), (r_node));			\
	if (rbp_se_cmp == 0) {						\
	    break;							\
	}								\
	if (rbp_se_cmp < 0) {						\
	    (r_node) = rbp_left_get(a_type, a_field, (r_node));		\
	} else {							\
	    (r_node) = rbp_right_get(a_type, a_field, (r_node));	\
	}								\
    }									\
    if ((r_node) == &(a_tree)->rbt_nil) {				\
	(r_node) = NULL;						\
    }									\
} while (0)
306
307 /*
308 * Find a match if it exists. Otherwise, find the next greater node, if one
309 * exists.
310 */
#define rb_nsearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do {	\
    a_type *rbp_ns_cur = (a_tree)->rbt_root;				\
    (r_node) = NULL;							\
    while (rbp_ns_cur != &(a_tree)->rbt_nil) {				\
	int rbp_ns_cmp = (a_cmp)((a_key), rbp_ns_cur);			\
	if (rbp_ns_cmp == 0) {						\
	    /* Exact match. */						\
	    (r_node) = rbp_ns_cur;					\
	    break;							\
	}								\
	if (rbp_ns_cmp < 0) {						\
	    /* Candidate "next greater" node; keep looking left. */	\
	    (r_node) = rbp_ns_cur;					\
	    rbp_ns_cur = rbp_left_get(a_type, a_field, rbp_ns_cur);	\
	} else {							\
	    rbp_ns_cur = rbp_right_get(a_type, a_field, rbp_ns_cur);	\
	}								\
    }									\
} while (0)
327
328 /*
329 * Find a match if it exists. Otherwise, find the previous lesser node, if one
330 * exists.
331 */
#define rb_psearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do {	\
    a_type *rbp_ps_cur = (a_tree)->rbt_root;				\
    (r_node) = NULL;							\
    while (rbp_ps_cur != &(a_tree)->rbt_nil) {				\
	int rbp_ps_cmp = (a_cmp)((a_key), rbp_ps_cur);			\
	if (rbp_ps_cmp == 0) {						\
	    /* Exact match. */						\
	    (r_node) = rbp_ps_cur;					\
	    break;							\
	}								\
	if (rbp_ps_cmp < 0) {						\
	    rbp_ps_cur = rbp_left_get(a_type, a_field, rbp_ps_cur);	\
	} else {							\
	    /* Candidate "previous lesser" node; keep looking right. */	\
	    (r_node) = rbp_ps_cur;					\
	    rbp_ps_cur = rbp_right_get(a_type, a_field, rbp_ps_cur);	\
	}								\
    }									\
} while (0)
348
/*
 * Left rotation about a_node.  r_node receives the new subtree root
 * (a_node's former right child); colors are not modified here.
 * Statement order matters: a_node's right child must be replaced
 * before a_node is attached under r_node.
 */
#define rbp_rotate_left(a_type, a_field, a_node, r_node) do {		\
    (r_node) = rbp_right_get(a_type, a_field, (a_node));		\
    rbp_right_set(a_type, a_field, (a_node), rbp_left_get(a_type, a_field, (r_node))); \
    rbp_left_set(a_type, a_field, (r_node), (a_node));			\
} while (0)
354
/*
 * Right rotation about a_node; mirror image of rbp_rotate_left.
 * r_node receives the new subtree root (a_node's former left child);
 * colors are not modified here.
 */
#define rbp_rotate_right(a_type, a_field, a_node, r_node) do {		\
    (r_node) = rbp_left_get(a_type, a_field, (a_node));			\
    rbp_left_set(a_type, a_field, (a_node), rbp_right_get(a_type, a_field, (r_node))); \
    rbp_right_set(a_type, a_field, (r_node), (a_node));			\
} while (0)
360
/*
 * Left rotation that also transfers colors: the new subtree root
 * (r_node) takes a_node's old color and a_node becomes red.  Used to
 * convert a right-leaning red link into the left-leaning form this
 * implementation maintains.
 */
#define rbp_lean_left(a_type, a_field, a_node, r_node) do {		\
    bool rbp_ll_red;							\
    rbp_rotate_left(a_type, a_field, (a_node), (r_node));		\
    rbp_ll_red = rbp_red_get(a_type, a_field, (a_node));		\
    rbp_color_set(a_type, a_field, (r_node), rbp_ll_red);		\
    rbp_red_set(a_type, a_field, (a_node));				\
} while (0)
368
/*
 * Right rotation that also transfers colors; mirror of rbp_lean_left.
 * The new subtree root (r_node) takes a_node's old color and a_node
 * becomes red.
 */
#define rbp_lean_right(a_type, a_field, a_node, r_node) do {		\
    bool rbp_lr_red;							\
    rbp_rotate_right(a_type, a_field, (a_node), (r_node));		\
    rbp_lr_red = rbp_red_get(a_type, a_field, (a_node));		\
    rbp_color_set(a_type, a_field, (r_node), rbp_lr_red);		\
    rbp_red_set(a_type, a_field, (a_node));				\
} while (0)
376
/*
 * Deletion descent helper: restructure/recolor the subtree rooted at
 * a_node so that a red link is moved toward the left path, allowing
 * rb_remove to continue descending left without ever standing on a
 * 2-node.  r_node receives the (possibly new) subtree root.
 * NOTE(review): callers in rb_remove invoke this only after checking
 * that a_node's left child and left grandchild are both black —
 * presumably the required precondition; confirm against the callers.
 */
#define rbp_move_red_left(a_type, a_field, a_node, r_node) do {		\
    a_type *rbp_mrl_t, *rbp_mrl_u;					\
    rbp_mrl_t = rbp_left_get(a_type, a_field, (a_node));		\
    rbp_red_set(a_type, a_field, rbp_mrl_t);				\
    rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node));		\
    rbp_mrl_u = rbp_left_get(a_type, a_field, rbp_mrl_t);		\
    if (rbp_red_get(a_type, a_field, rbp_mrl_u)) {			\
	/* Right child's left child is red: borrow from the right. */	\
	rbp_rotate_right(a_type, a_field, rbp_mrl_t, rbp_mrl_u);	\
	rbp_right_set(a_type, a_field, (a_node), rbp_mrl_u);		\
	rbp_rotate_left(a_type, a_field, (a_node), (r_node));		\
	rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node));		\
	if (rbp_red_get(a_type, a_field, rbp_mrl_t)) {			\
	    rbp_black_set(a_type, a_field, rbp_mrl_t);			\
	    rbp_red_set(a_type, a_field, (a_node));			\
	    rbp_rotate_left(a_type, a_field, (a_node), rbp_mrl_t);	\
	    rbp_left_set(a_type, a_field, (r_node), rbp_mrl_t);		\
	} else {							\
	    rbp_black_set(a_type, a_field, (a_node));			\
	}								\
    } else {								\
	/* Nothing to borrow: push the red link down-left. */		\
	rbp_red_set(a_type, a_field, (a_node));				\
	rbp_rotate_left(a_type, a_field, (a_node), (r_node));		\
    }									\
} while (0)
401
/*
 * Deletion descent helper; mirror role of rbp_move_red_left.
 * Restructure/recolor the subtree rooted at a_node so that a red link
 * is moved toward the right path, allowing rb_remove to continue
 * descending right without standing on a 2-node.  r_node receives the
 * (possibly new) subtree root.
 */
#define rbp_move_red_right(a_type, a_field, a_node, r_node) do {	\
    a_type *rbp_mrr_t;							\
    rbp_mrr_t = rbp_left_get(a_type, a_field, (a_node));		\
    if (rbp_red_get(a_type, a_field, rbp_mrr_t)) {			\
	/* Left child is red: borrow it for the right path. */		\
	a_type *rbp_mrr_u, *rbp_mrr_v;					\
	rbp_mrr_u = rbp_right_get(a_type, a_field, rbp_mrr_t);		\
	rbp_mrr_v = rbp_left_get(a_type, a_field, rbp_mrr_u);		\
	if (rbp_red_get(a_type, a_field, rbp_mrr_v)) {			\
	    rbp_color_set(a_type, a_field, rbp_mrr_u, rbp_red_get(a_type, a_field, (a_node))); \
	    rbp_black_set(a_type, a_field, rbp_mrr_v);			\
	    rbp_rotate_left(a_type, a_field, rbp_mrr_t, rbp_mrr_u);	\
	    rbp_left_set(a_type, a_field, (a_node), rbp_mrr_u);		\
	    rbp_rotate_right(a_type, a_field, (a_node), (r_node));	\
	    rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t);	\
	    rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t);	\
	} else {							\
	    rbp_color_set(a_type, a_field, rbp_mrr_t, rbp_red_get(a_type, a_field, (a_node))); \
	    rbp_red_set(a_type, a_field, rbp_mrr_u);			\
	    rbp_rotate_right(a_type, a_field, (a_node), (r_node));	\
	    rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t);	\
	    rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t);	\
	}								\
	rbp_red_set(a_type, a_field, (a_node));				\
    } else {								\
	rbp_red_set(a_type, a_field, rbp_mrr_t);			\
	rbp_mrr_t = rbp_left_get(a_type, a_field, rbp_mrr_t);		\
	if (rbp_red_get(a_type, a_field, rbp_mrr_t)) {			\
	    rbp_black_set(a_type, a_field, rbp_mrr_t);			\
	    rbp_rotate_right(a_type, a_field, (a_node), (r_node));	\
	    rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t);	\
	    rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t);	\
	} else {							\
	    rbp_rotate_left(a_type, a_field, (a_node), (r_node));	\
	}								\
    }									\
} while (0)
438
/*
 * Insert a_node into the tree in a single top-down pass: 4-nodes are
 * split on the way down, so no fixup ascent (and no parent pointers)
 * is needed.  A stack-allocated pseudo-root (rbp_i_s) stands in for
 * the real root's parent so the root needs no special casing until the
 * end.  a_node must not compare equal to any node already in the tree
 * (the descent asserts cmp != 0).
 */
#define rb_insert(a_type, a_field, a_cmp, a_tree, a_node) do {		\
    a_type rbp_i_s;							\
    a_type *rbp_i_g, *rbp_i_p, *rbp_i_c, *rbp_i_t, *rbp_i_u;		\
    int rbp_i_cmp = 0;							\
    rbp_i_g = &(a_tree)->rbt_nil;					\
    rbp_left_set(a_type, a_field, &rbp_i_s, (a_tree)->rbt_root);	\
    rbp_right_set(a_type, a_field, &rbp_i_s, &(a_tree)->rbt_nil);	\
    rbp_black_set(a_type, a_field, &rbp_i_s);				\
    rbp_i_p = &rbp_i_s;							\
    rbp_i_c = (a_tree)->rbt_root;					\
    /* Iteratively search down the tree for the insertion point,    */\
    /* splitting 4-nodes as they are encountered.  At the end of each */\
    /* iteration, rbp_i_g->rbp_i_p->rbp_i_c is a 3-level path down    */\
    /* the tree, assuming a sufficiently deep tree.                   */\
    while (rbp_i_c != &(a_tree)->rbt_nil) {				\
	rbp_i_t = rbp_left_get(a_type, a_field, rbp_i_c);		\
	rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t);		\
	if (rbp_red_get(a_type, a_field, rbp_i_t)			\
	  && rbp_red_get(a_type, a_field, rbp_i_u)) {			\
	    /* rbp_i_c is the top of a logical 4-node, so split it.   */\
	    /* This iteration does not move down the tree, due to the */\
	    /* disruptiveness of node splitting.                      */\
	    /*                                                        */\
	    /* Rotate right.                                          */\
	    rbp_rotate_right(a_type, a_field, rbp_i_c, rbp_i_t);	\
	    /* Pass red links up one level.                           */\
	    rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t);		\
	    rbp_black_set(a_type, a_field, rbp_i_u);			\
	    if (rbp_left_get(a_type, a_field, rbp_i_p) == rbp_i_c) {	\
		rbp_left_set(a_type, a_field, rbp_i_p, rbp_i_t);	\
		rbp_i_c = rbp_i_t;					\
	    } else {							\
		/* rbp_i_c was the right child of rbp_i_p, so rotate  */\
		/* left in order to maintain the left-leaning         */\
		/* invariant.                                         */\
		assert(rbp_right_get(a_type, a_field, rbp_i_p) == rbp_i_c); \
		rbp_right_set(a_type, a_field, rbp_i_p, rbp_i_t);	\
		rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_u);	\
		if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) {\
		    rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_u);	\
		} else {						\
		    assert(rbp_right_get(a_type, a_field, rbp_i_g) == rbp_i_p); \
		    rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_u);	\
		}							\
		rbp_i_p = rbp_i_u;					\
		rbp_i_cmp = (a_cmp)((a_node), rbp_i_p);			\
		if (rbp_i_cmp < 0) {					\
		    rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_p);	\
		} else {						\
		    assert(rbp_i_cmp > 0);				\
		    rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_p);	\
		}							\
		continue;						\
	    }								\
	}								\
	rbp_i_g = rbp_i_p;						\
	rbp_i_p = rbp_i_c;						\
	rbp_i_cmp = (a_cmp)((a_node), rbp_i_c);				\
	if (rbp_i_cmp < 0) {						\
	    rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_c);		\
	} else {							\
	    assert(rbp_i_cmp > 0);					\
	    rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_c);		\
	}								\
    }									\
    /* rbp_i_p now refers to the node under which to insert.          */\
    rbp_node_new(a_type, a_field, a_tree, (a_node));			\
    if (rbp_i_cmp > 0) {						\
	/* New node goes on the right: lean left to keep the          */\
	/* left-leaning invariant, then reattach under rbp_i_g.       */\
	rbp_right_set(a_type, a_field, rbp_i_p, (a_node));		\
	rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_t);		\
	if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) {	\
	    rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_t);		\
	} else if (rbp_right_get(a_type, a_field, rbp_i_g) == rbp_i_p) {\
	    rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_t);		\
	}								\
    } else {								\
	rbp_left_set(a_type, a_field, rbp_i_p, (a_node));		\
    }									\
    /* Update the root and make sure that it is black.                */\
    (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_i_s);	\
    rbp_black_set(a_type, a_field, (a_tree)->rbt_root);			\
} while (0)
521
/*
 * Remove a_node from the tree in a single top-down pass.  On the way
 * down, every visited node is converted to a 3- or 4-node (via
 * rbp_move_red_left/right) so the search never stands on a 2-node,
 * which makes deletion at a leaf trivial.  If a_node has a right
 * subtree, it is logically replaced by its in-order successor: the
 * descent records a_node's parent in rbp_r_xp, continues to the
 * successor, and splices the successor into a_node's position.  As in
 * rb_insert, a stack pseudo-root (rbp_r_s) avoids special-casing the
 * real root's parent.  a_node must be present in the tree.
 */
#define rb_remove(a_type, a_field, a_cmp, a_tree, a_node) do {		\
    a_type rbp_r_s;							\
    a_type *rbp_r_p, *rbp_r_c, *rbp_r_xp, *rbp_r_t, *rbp_r_u;		\
    int rbp_r_cmp;							\
    rbp_left_set(a_type, a_field, &rbp_r_s, (a_tree)->rbt_root);	\
    rbp_right_set(a_type, a_field, &rbp_r_s, &(a_tree)->rbt_nil);	\
    rbp_black_set(a_type, a_field, &rbp_r_s);				\
    rbp_r_p = &rbp_r_s;							\
    rbp_r_c = (a_tree)->rbt_root;					\
    rbp_r_xp = &(a_tree)->rbt_nil;					\
    /* Iterate down the tree, but always transform 2-nodes to 3- or   */\
    /* 4-nodes in order to maintain the invariant that the current    */\
    /* node is not a 2-node.  This allows simple deletion once a leaf */\
    /* is reached.  Handle the root specially though, since there may */\
    /* be no way to convert it from a 2-node to a 3-node.             */\
    rbp_r_cmp = (a_cmp)((a_node), rbp_r_c);				\
    if (rbp_r_cmp < 0) {						\
	rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c);		\
	rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t);		\
	if (rbp_red_get(a_type, a_field, rbp_r_t) == false		\
	  && rbp_red_get(a_type, a_field, rbp_r_u) == false) {		\
	    /* Apply standard transform to prepare for left move.     */\
	    rbp_move_red_left(a_type, a_field, rbp_r_c, rbp_r_t);	\
	    rbp_black_set(a_type, a_field, rbp_r_t);			\
	    rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t);		\
	    rbp_r_c = rbp_r_t;						\
	} else {							\
	    /* Move left.                                             */\
	    rbp_r_p = rbp_r_c;						\
	    rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c);		\
	}								\
    } else {								\
	if (rbp_r_cmp == 0) {						\
	    assert((a_node) == rbp_r_c);				\
	    if (rbp_right_get(a_type, a_field, rbp_r_c) == &(a_tree)->rbt_nil) { \
		/* Delete root node (which is also a leaf node).      */\
		if (rbp_left_get(a_type, a_field, rbp_r_c) != &(a_tree)->rbt_nil) { \
		    rbp_lean_right(a_type, a_field, rbp_r_c, rbp_r_t);	\
		    rbp_right_set(a_type, a_field, rbp_r_t, &(a_tree)->rbt_nil); \
		} else {						\
		    rbp_r_t = &(a_tree)->rbt_nil;			\
		}							\
		rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t);	\
	    } else {							\
		/* This is the node we want to delete, but we will    */\
		/* instead swap it with its successor and delete the  */\
		/* successor.  Record enough information to do the    */\
		/* swap later.  rbp_r_xp is the a_node's parent.      */\
		rbp_r_xp = rbp_r_p;					\
		rbp_r_cmp = 1; /* Note that deletion is incomplete.   */\
	    }								\
	}								\
	if (rbp_r_cmp == 1) {						\
	    if (rbp_red_get(a_type, a_field, rbp_left_get(a_type,	\
	      a_field, rbp_right_get(a_type, a_field, rbp_r_c))) == false) { \
		rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c);	\
		if (rbp_red_get(a_type, a_field, rbp_r_t)) {		\
		    /* Standard transform.                            */\
		    rbp_move_red_right(a_type, a_field, rbp_r_c, rbp_r_t); \
		} else {						\
		    /* Root-specific transform.                       */\
		    rbp_red_set(a_type, a_field, rbp_r_c);		\
		    rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t);	\
		    if (rbp_red_get(a_type, a_field, rbp_r_u)) {	\
			rbp_black_set(a_type, a_field, rbp_r_u);	\
			rbp_rotate_right(a_type, a_field, rbp_r_c, rbp_r_t); \
			rbp_rotate_left(a_type, a_field, rbp_r_c, rbp_r_u); \
			rbp_right_set(a_type, a_field, rbp_r_t, rbp_r_u); \
		    } else {						\
			rbp_red_set(a_type, a_field, rbp_r_t);		\
			rbp_rotate_left(a_type, a_field, rbp_r_c, rbp_r_t); \
		    }							\
		}							\
		rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t);	\
		rbp_r_c = rbp_r_t;					\
	    } else {							\
		/* Move right */\
		rbp_r_p = rbp_r_c;					\
		rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c);	\
	    }								\
	}								\
    }									\
    if (rbp_r_cmp != 0) {						\
	while (true) {							\
	    assert(rbp_r_p != &(a_tree)->rbt_nil);			\
	    rbp_r_cmp = (a_cmp)((a_node), rbp_r_c);			\
	    if (rbp_r_cmp < 0) {					\
		rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c);	\
		if (rbp_r_t == &(a_tree)->rbt_nil) {			\
		    /* rbp_r_c now refers to the successor node to    */\
		    /* relocate, and rbp_r_xp/a_node refer to the     */\
		    /* context for the relocation.                    */\
		    if (rbp_left_get(a_type, a_field, rbp_r_xp) == (a_node)) { \
			rbp_left_set(a_type, a_field, rbp_r_xp, rbp_r_c); \
		    } else {						\
			assert(rbp_right_get(a_type, a_field, rbp_r_xp) == (a_node)); \
			rbp_right_set(a_type, a_field, rbp_r_xp, rbp_r_c); \
		    }							\
		    /* Successor inherits a_node's children and color. */\
		    rbp_left_set(a_type, a_field, rbp_r_c, rbp_left_get(a_type, a_field, (a_node))); \
		    rbp_right_set(a_type, a_field, rbp_r_c, rbp_right_get(a_type, a_field, (a_node))); \
		    rbp_color_set(a_type, a_field, rbp_r_c, rbp_red_get(a_type, a_field, (a_node))); \
		    if (rbp_left_get(a_type, a_field, rbp_r_p) == rbp_r_c) { \
			rbp_left_set(a_type, a_field, rbp_r_p, &(a_tree)->rbt_nil); \
		    } else {						\
			assert(rbp_right_get(a_type, a_field, rbp_r_p) == rbp_r_c); \
			rbp_right_set(a_type, a_field, rbp_r_p, &(a_tree)->rbt_nil); \
		    }							\
		    break;						\
		}							\
		rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t);	\
		if (rbp_red_get(a_type, a_field, rbp_r_t) == false	\
		  && rbp_red_get(a_type, a_field, rbp_r_u) == false) {	\
		    rbp_move_red_left(a_type, a_field, rbp_r_c, rbp_r_t); \
		    if (rbp_left_get(a_type, a_field, rbp_r_p) == rbp_r_c) { \
			rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t);\
		    } else {						\
			rbp_right_set(a_type, a_field, rbp_r_p, rbp_r_t); \
		    }							\
		    rbp_r_c = rbp_r_t;					\
		} else {						\
		    rbp_r_p = rbp_r_c;					\
		    rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c);	\
		}							\
	    } else {							\
		/* Check whether to delete this node (it has to be    */\
		/* the correct node and a leaf node).                 */\
		if (rbp_r_cmp == 0) {					\
		    assert((a_node) == rbp_r_c);			\
		    if (rbp_right_get(a_type, a_field, rbp_r_c) == &(a_tree)->rbt_nil) { \
			/* Delete leaf node.                          */\
			if (rbp_left_get(a_type, a_field, rbp_r_c) != &(a_tree)->rbt_nil) { \
			    rbp_lean_right(a_type, a_field, rbp_r_c, rbp_r_t); \
			    rbp_right_set(a_type, a_field, rbp_r_t, &(a_tree)->rbt_nil); \
			} else {					\
			    rbp_r_t = &(a_tree)->rbt_nil;		\
			}						\
			if (rbp_left_get(a_type, a_field, rbp_r_p) == rbp_r_c) { \
			    rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
			} else {					\
			    rbp_right_set(a_type, a_field, rbp_r_p, rbp_r_t); \
			}						\
			break;						\
		    } else {						\
			/* This is the node we want to delete, but we */\
			/* will instead swap it with its successor    */\
			/* and delete the successor.  Record enough   */\
			/* information to do the swap later.          */\
			/* rbp_r_xp is a_node's parent.               */\
			rbp_r_xp = rbp_r_p;				\
		    }							\
		}							\
		rbp_r_t = rbp_right_get(a_type, a_field, rbp_r_c);	\
		rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t);	\
		if (rbp_red_get(a_type, a_field, rbp_r_u) == false) {	\
		    rbp_move_red_right(a_type, a_field, rbp_r_c,	\
		      rbp_r_t);						\
		    if (rbp_left_get(a_type, a_field, rbp_r_p) == rbp_r_c) { \
			rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t);\
		    } else {						\
			rbp_right_set(a_type, a_field, rbp_r_p, rbp_r_t); \
		    }							\
		    rbp_r_c = rbp_r_t;					\
		} else {						\
		    rbp_r_p = rbp_r_c;					\
		    rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c);	\
		}							\
	    }								\
	}								\
    }									\
    /* Update root.                                                   */\
    (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_r_s);	\
} while (0)
694
695 /*
696 * The rb_wrap() macro provides a convenient way to wrap functions around the
697 * cpp macros. The main benefits of wrapping are that 1) repeated macro
 * expansion can cause code bloat, especially for rb_{insert,remove}(), and
699 * 2) type, linkage, comparison functions, etc. need not be specified at every
700 * call point.
701 */
702
/*
 * Expands to a set of typed wrapper functions, each named
 * a_prefix##<op> (new, first, last, next, prev, search, nsearch,
 * psearch, insert, remove), with a_attr linkage (typically "static").
 * Each wrapper simply invokes the corresponding rb_* macro with the
 * bound type/field/comparator, so the macros expand exactly once per
 * tree type.
 */
#define rb_wrap(a_attr, a_prefix, a_tree_type, a_type, a_field, a_cmp)	\
a_attr void								\
a_prefix##new(a_tree_type *tree) {					\
    rb_new(a_type, a_field, tree);					\
}									\
a_attr a_type *								\
a_prefix##first(a_tree_type *tree) {					\
    a_type *ret;							\
    rb_first(a_type, a_field, tree, ret);				\
    return (ret);							\
}									\
a_attr a_type *								\
a_prefix##last(a_tree_type *tree) {					\
    a_type *ret;							\
    rb_last(a_type, a_field, tree, ret);				\
    return (ret);							\
}									\
a_attr a_type *								\
a_prefix##next(a_tree_type *tree, a_type *node) {			\
    a_type *ret;							\
    rb_next(a_type, a_field, a_cmp, tree, node, ret);			\
    return (ret);							\
}									\
a_attr a_type *								\
a_prefix##prev(a_tree_type *tree, a_type *node) {			\
    a_type *ret;							\
    rb_prev(a_type, a_field, a_cmp, tree, node, ret);			\
    return (ret);							\
}									\
a_attr a_type *								\
a_prefix##search(a_tree_type *tree, a_type *key) {			\
    a_type *ret;							\
    rb_search(a_type, a_field, a_cmp, tree, key, ret);			\
    return (ret);							\
}									\
a_attr a_type *								\
a_prefix##nsearch(a_tree_type *tree, a_type *key) {			\
    a_type *ret;							\
    rb_nsearch(a_type, a_field, a_cmp, tree, key, ret);			\
    return (ret);							\
}									\
a_attr a_type *								\
a_prefix##psearch(a_tree_type *tree, a_type *key) {			\
    a_type *ret;							\
    rb_psearch(a_type, a_field, a_cmp, tree, key, ret);			\
    return (ret);							\
}									\
a_attr void								\
a_prefix##insert(a_tree_type *tree, a_type *node) {			\
    rb_insert(a_type, a_field, a_cmp, tree, node);			\
}									\
a_attr void								\
a_prefix##remove(a_tree_type *tree, a_type *node) {			\
    rb_remove(a_type, a_field, a_cmp, tree, node);			\
}
758
759 /*
760 * The iterators simulate recursion via an array of pointers that store the
761 * current path. This is critical to performance, since a series of calls to
762 * rb_{next,prev}() would require time proportional to (n lg n), whereas this
763 * implementation only requires time proportional to (n).
764 *
765 * Since the iterators cache a path down the tree, any tree modification may
766 * cause the cached path to become invalid. In order to continue iteration,
767 * use something like the following sequence:
768 *
769 * {
770 * a_type *node, *tnode;
771 *
772 * rb_foreach_begin(a_type, a_field, a_tree, node) {
773 * ...
774 * rb_next(a_type, a_field, a_cmp, a_tree, node, tnode);
775 * rb_remove(a_type, a_field, a_cmp, a_tree, node);
776 * rb_foreach_next(a_type, a_field, a_cmp, a_tree, tnode);
777 * ...
778 * } rb_foreach_end(a_type, a_field, a_tree, node)
779 * }
780 *
781 * Note that this idiom is not advised if every iteration modifies the tree,
782 * since in that case there is no algorithmic complexity improvement over a
783 * series of rb_{next,prev}() calls, thus making the setup overhead wasted
784 * effort.
785 */
786
/*
 * Open an in-order traversal.  Maintains an explicit path stack (a VLA
 * sized from 3x the black height, an upper bound on tree depth) instead
 * of recursion.  Deliberately leaves braces A/B/C open; they are closed
 * by the mandatory matching rb_foreach_end.  a_var is set to each node
 * in turn inside the loop body the caller writes between begin/end.
 */
#define rb_foreach_begin(a_type, a_field, a_tree, a_var) { /* brace A */ \
    /* Compute the maximum possible tree depth (3X the black height). */\
    unsigned rbp_f_height;						\
    rbp_black_height(a_type, a_field, a_tree, rbp_f_height);		\
    rbp_f_height *= 3;							\
    { /* brace B */							\
	/* Initialize the path to contain the left spine.             */\
	a_type *rbp_f_path[rbp_f_height];				\
	a_type *rbp_f_node;						\
	bool rbp_f_synced = false;					\
	unsigned rbp_f_depth = 0;					\
	if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) {			\
	    rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root;		\
	    rbp_f_depth++;						\
	    while ((rbp_f_node = rbp_left_get(a_type, a_field,		\
	      rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) {	\
		rbp_f_path[rbp_f_depth] = rbp_f_node;			\
		rbp_f_depth++;						\
	    }								\
	}								\
	/* While the path is non-empty, iterate.                      */\
	while (rbp_f_depth > 0) { /* brace C */				\
	    (a_var) = rbp_f_path[rbp_f_depth-1];
810
811 /*
812 * Note that rb_foreach_begin omits closing }'s because
813 * it expects that it will be succeeded by a call to
814 * rb_foreach_end which will have the closing }
815 */
816
817 /* Only use if modifying the tree during iteration. */
/*
 * Only use if modifying the tree during iteration: rebuilds the cached
 * path stack so that it leads to a_node (the next node to visit), then
 * sets rbp_f_synced so the matching rb_foreach_end skips its normal
 * successor computation for this iteration.  Passing NULL ends the
 * traversal (empty path).
 */
#define rb_foreach_next(a_type, a_field, a_cmp, a_tree, a_node)		\
    /* Re-initialize the path to contain the path to a_node.          */\
    rbp_f_depth = 0;							\
    if (a_node != NULL) {						\
	if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) {			\
	    rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root;		\
	    rbp_f_depth++;						\
	    rbp_f_node = rbp_f_path[0];					\
	    while (true) {						\
		int rbp_f_cmp = (a_cmp)((a_node),			\
		  rbp_f_path[rbp_f_depth-1]);				\
		if (rbp_f_cmp < 0) {					\
		    rbp_f_node = rbp_left_get(a_type, a_field,		\
		      rbp_f_path[rbp_f_depth-1]);			\
		} else if (rbp_f_cmp > 0) {				\
		    rbp_f_node = rbp_right_get(a_type, a_field,		\
		      rbp_f_path[rbp_f_depth-1]);			\
		} else {						\
		    break;						\
		}							\
		assert(rbp_f_node != &(a_tree)->rbt_nil);		\
		rbp_f_path[rbp_f_depth] = rbp_f_node;			\
		rbp_f_depth++;						\
	    }								\
	}								\
    }									\
    rbp_f_synced = true;
845
/*
 * Close a forward iteration opened by rb_foreach_begin(): advance the path
 * to the in-order successor of the current node (unless rb_foreach_next()
 * just rebuilt the path, in which case it is already positioned) and supply
 * the closing braces for scopes C, B, and A.
 */
#define rb_foreach_end(a_type, a_field, a_tree, a_var) \
    if (rbp_f_synced) { \
        /* rb_foreach_next() already repositioned the path; skip the */\
        /* successor computation.  No explicit depth check is needed */\
        /* here (unlike rb_foreach_reverse_end): if rb_foreach_next() */\
        /* was given NULL, rbp_f_depth is 0 and the loop condition */\
        /* (rbp_f_depth > 0) terminates the iteration after continue. */\
        rbp_f_synced = false; \
        continue; \
    } \
    /* Find the successor. */\
    if ((rbp_f_node = rbp_right_get(a_type, a_field, \
      rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
        /* The successor is the left-most node in the right */\
        /* subtree. */\
        rbp_f_path[rbp_f_depth] = rbp_f_node; \
        rbp_f_depth++; \
        while ((rbp_f_node = rbp_left_get(a_type, a_field, \
          rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
            rbp_f_path[rbp_f_depth] = rbp_f_node; \
            rbp_f_depth++; \
        } \
    } else { \
        /* The successor is above the current node.  Unwind */\
        /* until a left-leaning edge is removed from the */\
        /* path, or the path is empty. */\
        for (rbp_f_depth--; rbp_f_depth > 0; rbp_f_depth--) { \
            if (rbp_left_get(a_type, a_field, rbp_f_path[rbp_f_depth-1]) \
              == rbp_f_path[rbp_f_depth]) { \
                break; \
            } \
        } \
    } \
} /* close brace C */ \
} /* close brace B */ \
} /* close brace A */
877
878
879
/*
 * Begin a reverse in-order (descending) iteration over a_tree, binding each
 * visited node to a_var.  Mirror image of rb_foreach_begin(): opens scopes
 * A/B/C that the matching rb_foreach_reverse_end() closes, and maintains an
 * explicit stack (rbp_fr_path) shared with rb_foreach_reverse_prev() and
 * rb_foreach_reverse_end(), which may only appear between this macro and
 * its matching rb_foreach_reverse_end().
 */
#define rb_foreach_reverse_begin(a_type, a_field, a_tree, a_var) { /* brace A */ \
    /* Compute the maximum possible tree depth (3X the black height). */\
    unsigned rbp_fr_height; \
    rbp_black_height(a_type, a_field, a_tree, rbp_fr_height); \
    rbp_fr_height *= 3; \
    { /* brace B */ \
        /* Initialize the path to contain the right spine. */\
        /* NOTE(review): rbp_fr_path is a C99 VLA; stack usage is bounded */\
        /* by 3x the tree's black height computed above. */\
        a_type *rbp_fr_path[rbp_fr_height]; \
        a_type *rbp_fr_node; \
        bool rbp_fr_synced = false; \
        unsigned rbp_fr_depth = 0; \
        if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
            rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \
            rbp_fr_depth++; \
            /* Descend to the maximum (right-most) node of the tree. */\
            while ((rbp_fr_node = rbp_right_get(a_type, a_field, \
              rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
                rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
                rbp_fr_depth++; \
            } \
        } \
        /* While the path is non-empty, iterate. */\
        while (rbp_fr_depth > 0) { /* brace C */ \
            (a_var) = rbp_fr_path[rbp_fr_depth-1];
903
904
/* Only use if modifying the tree during iteration. */
/*
 * Re-synchronize a reverse iteration after the tree has been modified,
 * making a_node the next node to visit.  Mirror image of rb_foreach_next():
 * rebuilds rbp_fr_path by searching for a_node from the root (a_node MUST
 * be present in the tree — see the assert below), then sets rbp_fr_synced
 * so the following rb_foreach_reverse_end() skips its predecessor
 * computation.  Passing a_node == NULL leaves the path empty, terminating
 * the iteration.
 */
#define rb_foreach_reverse_prev(a_type, a_field, a_cmp, a_tree, a_node) \
    /* Re-initialize the path to contain the path to a_node. */\
    rbp_fr_depth = 0; \
    if (a_node != NULL) { \
        if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
            rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \
            rbp_fr_depth++; \
            rbp_fr_node = rbp_fr_path[0]; \
            /* Standard BST search, recording every node on the way down. */\
            while (true) { \
                int rbp_fr_cmp = (a_cmp)((a_node), rbp_fr_path[rbp_fr_depth-1]); \
                if (rbp_fr_cmp < 0) { \
                    rbp_fr_node = rbp_left_get(a_type, a_field, \
                      rbp_fr_path[rbp_fr_depth-1]); \
                } else if (rbp_fr_cmp > 0) { \
                    rbp_fr_node = rbp_right_get(a_type, a_field, rbp_fr_path[rbp_fr_depth-1]); \
                } else { \
                    /* Found a_node; the path is complete. */\
                    break; \
                } \
                /* Hitting the nil sentinel means a_node is not in the */\
                /* tree, which violates this macro's contract. */\
                assert(rbp_fr_node != &(a_tree)->rbt_nil); \
                rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
                rbp_fr_depth++; \
            } \
        } \
    } \
    rbp_fr_synced = true;
931
/*
 * Close a reverse iteration opened by rb_foreach_reverse_begin(): step the
 * path to the in-order predecessor of the current node (unless
 * rb_foreach_reverse_prev() just rebuilt the path) and supply the closing
 * braces for scopes C, B, and A.
 */
#define rb_foreach_reverse_end(a_type, a_field, a_tree, a_var) \
    if (rbp_fr_synced) { \
        rbp_fr_synced = false; \
        continue; \
    } \
    if (rbp_fr_depth == 0) { \
        /* rb_foreach_reverse_prev() was called with a NULL */\
        /* a_node.  (Defensive: the loop condition above would */\
        /* also terminate the iteration in this case, as the */\
        /* forward rb_foreach_end relies on.) */\
        break; \
    } \
    /* Find the predecessor. */\
    if ((rbp_fr_node = rbp_left_get(a_type, a_field, \
      rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
        /* The predecessor is the right-most node in the left */\
        /* subtree. */\
        rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
        rbp_fr_depth++; \
        while ((rbp_fr_node = rbp_right_get(a_type, a_field, \
          rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) {\
            rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
            rbp_fr_depth++; \
        } \
    } else { \
        /* The predecessor is above the current node.  Unwind */\
        /* until a right-leaning edge is removed from the */\
        /* path, or the path is empty. */\
        for (rbp_fr_depth--; rbp_fr_depth > 0; rbp_fr_depth--) {\
            if (rbp_right_get(a_type, a_field, rbp_fr_path[rbp_fr_depth-1]) \
              == rbp_fr_path[rbp_fr_depth]) { \
                break; \
            } \
        } \
    } \
} /* Close brace C */ \
} /* close brace B */ \
} /* close brace A*/
968
969 #endif /* RB_H_ */