/*
 * COPYRIGHT
 *
 *  PCB, interactive printed circuit board design
 *  Copyright (C) 1994,1995,1996 Thomas Nau
 *  Copyright (C) 1998,1999,2000,2001,2002,2003,2004 harry eaton
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 *  Contact addresses for paper mail and Email:
 *  harry eaton, 6697 Buttonhole Ct, Columbia, MD 21044 USA
 *  haceaton@aplcomm.jhuapl.edu
 *
 */

/* this file, rtree.c, was written and is
 * Copyright (c) 2004, harry eaton
 */

/* implements r-tree structures.
 * these should be much faster for the auto-router
 * because the recursive search is much more efficient
 * and that's where the auto-router spends all its time.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "global.h"

#include <assert.h>
#include <inttypes.h>
#include <setjmp.h>

#include "mymem.h"

#include "rtree.h"

#ifdef HAVE_LIBDMALLOC
#include <dmalloc.h>
#endif

#define SLOW_ASSERTS

/* All rectangles are closed on the bottom left and open on the
 * top right. i.e. they contain one corner point, but not the other.
 * This requires that the corner points not be equal!
 */
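
/* Concretely: under this half-open convention two boxes a and b overlap
 * exactly when
 *   a->X1 < b->X2 && a->X2 > b->X1 && a->Y1 < b->Y2 && a->Y2 > b->Y1
 * so boxes that merely share an edge (one ending at X2 == 10, the next
 * starting at X1 == 10) do not count as overlapping. This is the strict
 * comparison used by __r_search () below.
 */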

/* the number of entries in each rtree node
 * 4 - 7 seem to be pretty good settings
 */
#define M_SIZE 6

/* it seems that sorting the leaf order slows us down
 * but sometimes gives better routes
 */
#undef SORT
#define SORT_NONLEAF

#define DELETE_BY_POINTER

typedef struct
{
  const BoxType *bptr;          /* pointer to the box */
  BoxType bounds;               /* copy of the box for locality of reference */
} Rentry;

struct rtree_node
{
  BoxType box;                  /* bounds rectangle of this node */
  struct rtree_node *parent;    /* parent of this node, NULL = root */
  struct
  {
    unsigned is_leaf:1;         /* this is a leaf node */
    unsigned manage:31;         /* true==should free 'rect.bptr' if node is destroyed */
  }
  flags;
  union
  {
    struct rtree_node *kids[M_SIZE + 1];        /* when not leaf */
    Rentry rects[M_SIZE + 1];   /* when leaf */
  } u;
};

#ifndef NDEBUG
#ifdef SLOW_ASSERTS
static int
__r_node_is_good (struct rtree_node *node)
{
  int i, flag = 1;
  int kind = -1;
  bool last = false;

  if (node == NULL)
    return 1;
  for (i = 0; i < M_SIZE; i++)
    {
      if (node->flags.is_leaf)
        {
          if (!node->u.rects[i].bptr)
            {
              last = true;
              continue;
            }
          /* check that once one entry is empty, all the rest are too */
          if (node->u.rects[i].bptr && last)
            assert (0);
          /* check that the box makes sense */
          if (node->box.X1 > node->box.X2)
            assert (0);
          if (node->box.Y1 > node->box.Y2)
            assert (0);
          /* check that bounds is the same as the pointer */
          if (node->u.rects[i].bounds.X1 != node->u.rects[i].bptr->X1)
            assert (0);
          if (node->u.rects[i].bounds.Y1 != node->u.rects[i].bptr->Y1)
            assert (0);
          if (node->u.rects[i].bounds.X2 != node->u.rects[i].bptr->X2)
            assert (0);
          if (node->u.rects[i].bounds.Y2 != node->u.rects[i].bptr->Y2)
            assert (0);
          /* check that entries are within node bounds */
          if (node->u.rects[i].bounds.X1 < node->box.X1)
            assert (0);
          if (node->u.rects[i].bounds.X2 > node->box.X2)
            assert (0);
          if (node->u.rects[i].bounds.Y1 < node->box.Y1)
            assert (0);
          if (node->u.rects[i].bounds.Y2 > node->box.Y2)
            assert (0);
        }
      else
        {
          if (!node->u.kids[i])
            {
              last = true;
              continue;
            }
          /* make sure all children are the same type */
          if (kind == -1)
            kind = node->u.kids[i]->flags.is_leaf;
          else if (kind != node->u.kids[i]->flags.is_leaf)
            assert (0);
          /* check that once one entry is empty, all the rest are too */
          if (node->u.kids[i] && last)
            assert (0);
          /* check that entries are within node bounds */
          if (node->u.kids[i]->box.X1 < node->box.X1)
            assert (0);
          if (node->u.kids[i]->box.X2 > node->box.X2)
            assert (0);
          if (node->u.kids[i]->box.Y1 < node->box.Y1)
            assert (0);
          if (node->u.kids[i]->box.Y2 > node->box.Y2)
            assert (0);
        }
      flag <<= 1;
    }
  /* check that we're completely in the parent's bounds */
  if (node->parent)
    {
      if (node->parent->box.X1 > node->box.X1)
        assert (0);
      if (node->parent->box.X2 < node->box.X2)
        assert (0);
      if (node->parent->box.Y1 > node->box.Y1)
        assert (0);
      if (node->parent->box.Y2 < node->box.Y2)
        assert (0);
    }
  /* make sure overflow is empty */
  if (!node->flags.is_leaf && node->u.kids[i])
    assert (0);
  if (node->flags.is_leaf && node->u.rects[i].bptr)
    assert (0);
  return 1;
}

/* check the whole tree from this node down for consistency */
static bool
__r_tree_is_good (struct rtree_node *node)
{
  int i;

  if (!node)
    return 1;
  if (!__r_node_is_good (node))
    assert (0);
  if (node->flags.is_leaf)
    return 1;
  for (i = 0; i < M_SIZE; i++)
    {
      if (!__r_tree_is_good (node->u.kids[i]))
        return 0;
    }
  return 1;
}
#endif
#endif

#ifndef NDEBUG
/* print out the tree */
void
__r_dump_tree (struct rtree_node *node, int depth)
{
  int i, j;
  static int count;
  static double area;

  if (depth == 0)
    {
      area = 0;
      count = 0;
    }
  area +=
    (node->box.X2 - node->box.X1) * (double) (node->box.Y2 - node->box.Y1);
  count++;
  for (i = 0; i < depth; i++)
    printf (" ");
  if (!node->flags.is_leaf)
    {
      printf ("p=0x%p node X(%" PRIi64 ", %" PRIi64 ") Y(%" PRIi64 ", %" PRIi64 ")\n",
              (void *) node,
              (int64_t) (node->box.X1),
              (int64_t) (node->box.X2),
              (int64_t) (node->box.Y1),
              (int64_t) (node->box.Y2) );
    }
  else
    {
      printf ("p=0x%p leaf manage(%02x) X(%" PRIi64 ", %" PRIi64 ") Y(%" PRIi64 ", %" PRIi64 ")\n",
              (void *) node,
              node->flags.manage,
              (int64_t) (node->box.X1),
              (int64_t) (node->box.X2),
              (int64_t) (node->box.Y1),
              (int64_t) (node->box.Y2) );
      for (j = 0; j < M_SIZE; j++)
        {
          if (!node->u.rects[j].bptr)
            break;
          area +=
            (node->u.rects[j].bounds.X2 -
             node->u.rects[j].bounds.X1) *
            (double) (node->u.rects[j].bounds.Y2 -
                      node->u.rects[j].bounds.Y1);
          count++;
          for (i = 0; i < depth + 1; i++)
            printf (" ");
          printf ("entry 0x%p X(%" PRIi64 ", %" PRIi64 ") Y(%" PRIi64 ", %" PRIi64 ")\n",
                  (void *) (node->u.rects[j].bptr),
                  (int64_t) (node->u.rects[j].bounds.X1),
                  (int64_t) (node->u.rects[j].bounds.X2),
                  (int64_t) (node->u.rects[j].bounds.Y1),
                  (int64_t) (node->u.rects[j].bounds.Y2) );
        }
      return;
    }
  for (i = 0; i < M_SIZE; i++)
    if (node->u.kids[i])
      __r_dump_tree (node->u.kids[i], depth + 1);
  if (depth == 0)
    printf ("average box area is %g\n", area / count);
}
#endif

/* Sort the children or entries of a node
 * according to the largest side.
 */
#ifdef SORT
static int
cmp_box (const BoxType * a, const BoxType * b)
{
  /* compare two box coordinates so that the __r_search
   * will fail at the earliest comparison possible.
   * It needs to see the biggest X1 first, then the
   * smallest X2, the biggest Y1 and smallest Y2
   */
  if (a->X1 < b->X1)
    return 0;
  if (a->X1 > b->X1)
    return 1;
  if (a->X2 > b->X2)
    return 0;
  if (a->X2 < b->X2)
    return 1;
  if (a->Y1 < b->Y1)
    return 0;
  if (a->Y1 > b->Y1)
    return 1;
  if (a->Y2 > b->Y2)
    return 0;
  return 1;
}

static void
sort_node (struct rtree_node *node)
{
  if (node->flags.is_leaf)
    {
      register Rentry *r, *i, temp;

      /* straight insertion sort; compare against the saved entry (temp),
       * because the slot at *r is overwritten once entries start shifting */
      for (r = &node->u.rects[1]; r->bptr; r++)
        {
          temp = *r;
          i = r - 1;
          while (i >= &node->u.rects[0])
            {
              if (cmp_box (&i->bounds, &temp.bounds))
                break;
              *(i + 1) = *i;
              i--;
            }
          *(i + 1) = temp;
        }
    }
#ifdef SORT_NONLEAF
  else
    {
      register struct rtree_node **r, **i, *temp;

      for (r = &node->u.kids[1]; *r; r++)
        {
          temp = *r;
          i = r - 1;
          while (i >= &node->u.kids[0])
            {
              if (cmp_box (&(*i)->box, &temp->box))
                break;
              *(i + 1) = *i;
              i--;
            }
          *(i + 1) = temp;
        }
    }
#endif
}
#else
#define sort_node(x)
#endif

/* set the node bounds large enough to encompass all
 * of the children's rectangles
 */
static void
adjust_bounds (struct rtree_node *node)
{
  int i;

  assert (node);
  assert (node->u.kids[0]);
  if (node->flags.is_leaf)
    {
      node->box = node->u.rects[0].bounds;
      for (i = 1; i < M_SIZE + 1; i++)
        {
          if (!node->u.rects[i].bptr)
            return;
          MAKEMIN (node->box.X1, node->u.rects[i].bounds.X1);
          MAKEMAX (node->box.X2, node->u.rects[i].bounds.X2);
          MAKEMIN (node->box.Y1, node->u.rects[i].bounds.Y1);
          MAKEMAX (node->box.Y2, node->u.rects[i].bounds.Y2);
        }
    }
  else
    {
      node->box = node->u.kids[0]->box;
      for (i = 1; i < M_SIZE + 1; i++)
        {
          if (!node->u.kids[i])
            return;
          MAKEMIN (node->box.X1, node->u.kids[i]->box.X1);
          MAKEMAX (node->box.X2, node->u.kids[i]->box.X2);
          MAKEMIN (node->box.Y1, node->u.kids[i]->box.Y1);
          MAKEMAX (node->box.Y2, node->u.kids[i]->box.Y2);
        }
    }
}

/* create an r-tree from an unsorted list of boxes.
 * the r-tree will keep pointers into
 * it, so don't free the box list until you've called r_destroy_tree.
 * if you set 'manage' to true, r_destroy_tree will free your boxlist.
 */
rtree_t *
r_create_tree (const BoxType * boxlist[], int N, int manage)
{
  rtree_t *rtree;
  struct rtree_node *node;
  int i;

  assert (N >= 0);
  rtree = (rtree_t *) calloc (1, sizeof (*rtree));
  /* start with a single empty leaf node */
  node = (struct rtree_node *) calloc (1, sizeof (*node));
  node->flags.is_leaf = 1;
  node->parent = NULL;
  rtree->root = node;
  /* simple, just insert all of the boxes! */
  for (i = 0; i < N; i++)
    {
      assert (boxlist[i]);
      r_insert_entry (rtree, boxlist[i], manage);
    }
#ifdef SLOW_ASSERTS
  assert (__r_tree_is_good (rtree->root));
#endif
  return rtree;
}

static void
__r_destroy_tree (struct rtree_node *node)
{
  int i, flag = 1;

  if (node->flags.is_leaf)
    for (i = 0; i < M_SIZE; i++)
      {
        if (!node->u.rects[i].bptr)
          break;
        if (node->flags.manage & flag)
          free ((void *) node->u.rects[i].bptr);
        flag = flag << 1;
      }
  else
    for (i = 0; i < M_SIZE; i++)
      {
        if (!node->u.kids[i])
          break;
        __r_destroy_tree (node->u.kids[i]);
      }
  free (node);
}

/* free the memory associated with an rtree. */
void
r_destroy_tree (rtree_t ** rtree)
{
  __r_destroy_tree ((*rtree)->root);
  free (*rtree);
  *rtree = NULL;
}
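
/* Usage sketch (illustrative only): 'my_boxes' and 'n' are hypothetical
 * caller-side names, not part of this file.
 *
 *   const BoxType *my_boxes[] = { ... };             // boxes owned by the caller
 *   rtree_t *tree = r_create_tree (my_boxes, n, 0);  // manage == 0
 *   ...
 *   r_destroy_tree (&tree);   // frees only the tree nodes in this case
 *
 * Had the boxes been inserted with manage != 0, r_destroy_tree () would
 * also free each managed box, as __r_destroy_tree () above shows.
 */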

typedef struct
{
  int (*check_it) (const BoxType * region, void *cl);
  int (*found_it) (const BoxType * box, void *cl);
  void *closure;
} r_arg;

/* most of the auto-routing time is spent in this routine
 * so some careful thought has been given to maximizing the speed
 */
static int
__r_search (struct rtree_node *node, const BoxType * query, r_arg * arg)
{
  assert (node);
  /** assert that starting_region is well formed */
  assert (query->X1 < query->X2 && query->Y1 < query->Y2);
  assert (node->box.X1 < query->X2 && node->box.X2 > query->X1 &&
          node->box.Y1 < query->Y2 && node->box.Y2 > query->Y1);
#ifdef SLOW_ASSERTS
  /** assert that node is well formed */
  assert (__r_node_is_good (node));
#endif
  /* the check for bounds is done before entry. This saves the overhead
   * of building/destroying the stack frame for each bounds that fails
   * to intersect, which is the most common condition.
   */
  if (node->flags.is_leaf)
    {
      register int i;

      if (arg->found_it)        /* test this once outside of loop */
        {
          register int seen = 0;

          for (i = 0; node->u.rects[i].bptr; i++)
            {
              if ((node->u.rects[i].bounds.X1 < query->X2) &&
                  (node->u.rects[i].bounds.X2 > query->X1) &&
                  (node->u.rects[i].bounds.Y1 < query->Y2) &&
                  (node->u.rects[i].bounds.Y2 > query->Y1) &&
                  arg->found_it (node->u.rects[i].bptr, arg->closure))
                seen++;
            }
          return seen;
        }
      else
        {
          register int seen = 0;

          for (i = 0; node->u.rects[i].bptr; i++)
            {
              if ((node->u.rects[i].bounds.X1 < query->X2) &&
                  (node->u.rects[i].bounds.X2 > query->X1) &&
                  (node->u.rects[i].bounds.Y1 < query->Y2) &&
                  (node->u.rects[i].bounds.Y2 > query->Y1))
                seen++;
            }
          return seen;
        }
    }

  /* not a leaf, recurse on lower nodes */
  if (arg->check_it)
    {
      int seen = 0;
      struct rtree_node **n;

      for (n = &node->u.kids[0]; *n; n++)
        {
          if ((*n)->box.X1 >= query->X2 ||
              (*n)->box.X2 <= query->X1 ||
              (*n)->box.Y1 >= query->Y2 || (*n)->box.Y2 <= query->Y1 ||
              !arg->check_it (&(*n)->box, arg->closure))
            continue;
          seen += __r_search (*n, query, arg);
        }
      return seen;
    }
  else
    {
      int seen = 0;
      struct rtree_node **n;

      for (n = &node->u.kids[0]; *n; n++)
        {
          if ((*n)->box.X1 >= query->X2 ||
              (*n)->box.X2 <= query->X1 ||
              (*n)->box.Y1 >= query->Y2 || (*n)->box.Y2 <= query->Y1)
            continue;
          seen += __r_search (*n, query, arg);
        }
      return seen;
    }
}

/* Parameterized search in the rtree.
 * Returns the number of rectangles found.
 * calls found_rectangle for each intersection seen
 * and calls check_region with the current sub-region
 * to see whether deeper searching is desired
 */
int
r_search (rtree_t * rtree, const BoxType * query,
          int (*check_region) (const BoxType * region, void *cl),
          int (*found_rectangle) (const BoxType * box, void *cl), void *cl)
{
  r_arg arg;

  if (!rtree || rtree->size < 1)
    return 0;
  if (query)
    {
#ifdef SLOW_ASSERTS
      assert (__r_tree_is_good (rtree->root));
#endif
#ifdef DEBUG
      if (query->X2 <= query->X1 || query->Y2 <= query->Y1)
        return 0;
#endif
      /* check this box. If it's not touched we're done here */
      if (rtree->root->box.X1 >= query->X2 ||
          rtree->root->box.X2 <= query->X1 ||
          rtree->root->box.Y1 >= query->Y2
          || rtree->root->box.Y2 <= query->Y1)
        return 0;
      arg.check_it = check_region;
      arg.found_it = found_rectangle;
      arg.closure = cl;
      return __r_search (rtree->root, query, &arg);
    }
  else
    {
      arg.check_it = check_region;
      arg.found_it = found_rectangle;
      arg.closure = cl;
      return __r_search (rtree->root, &rtree->root->box, &arg);
    }
}
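
/* Usage sketch (illustrative only): 'count_hit', 'hits', 'tree' and
 * 'search_area' are hypothetical caller-side names; the callback matches
 * the found_rectangle prototype above.
 *
 *   static int
 *   count_hit (const BoxType * box, void *cl)
 *   {
 *     int *hits = (int *) cl;
 *     (*hits)++;
 *     return 1;    // nonzero return makes r_search count this box
 *   }
 *
 *   int hits = 0;
 *   r_search (tree, &search_area, NULL, count_hit, &hits);
 *
 * Passing NULL for check_region recurses into every intersecting subtree;
 * passing NULL for the query searches the whole tree (the root's box).
 */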

/*------ r_region_is_empty ------*/
static int
__r_region_is_empty_rect_in_reg (const BoxType * box, void *cl)
{
  jmp_buf *envp = (jmp_buf *) cl;
  longjmp (*envp, 1);           /* found one! */
}

/* return 0 if there are any rectangles in the given region. */
int
r_region_is_empty (rtree_t * rtree, const BoxType * region)
{
  jmp_buf env;
#ifndef NDEBUG
  int r;
#endif

  if (setjmp (env))
    return 0;
#ifndef NDEBUG
  r =
#endif
    r_search (rtree, region, NULL, __r_region_is_empty_rect_in_reg, &env);
#ifndef NDEBUG
  assert (r == 0);              /* otherwise longjmp would have been called */
#endif
  return 1;                     /* no rectangles found */
}

struct centroid
{
  float x, y, area;
};

/* split the node into two nodes putting clusters in each
 * use the k-means clustering algorithm
 */
struct rtree_node *
find_clusters (struct rtree_node *node)
{
  float total_a, total_b;
  float a_X, a_Y, b_X, b_Y;
  bool belong[M_SIZE + 1];
  struct centroid center[M_SIZE + 1];
  int clust_a, clust_b, tries;
  int a_manage = 0, b_manage = 0;
  int i, old_ax, old_ay, old_bx, old_by;
  struct rtree_node *new_node;
  BoxType *b;

  for (i = 0; i < M_SIZE + 1; i++)
    {
      if (node->flags.is_leaf)
        b = &(node->u.rects[i].bounds);
      else
        b = &(node->u.kids[i]->box);
      center[i].x = 0.5 * (b->X1 + b->X2);
      center[i].y = 0.5 * (b->Y1 + b->Y2);
      /* adding 1 prevents zero area */
      center[i].area = 1. + (float) (b->X2 - b->X1) * (float) (b->Y2 - b->Y1);
    }
  /* starting 'A' cluster center */
  a_X = center[0].x;
  a_Y = center[0].y;
  /* starting 'B' cluster center */
  b_X = center[M_SIZE].x;
  b_Y = center[M_SIZE].y;
  /* don't allow the same cluster centers */
  if (b_X == a_X && b_Y == a_Y)
    {
      b_X += 10000;
      a_Y -= 10000;
    }
  for (tries = 0; tries < M_SIZE; tries++)
    {
      old_ax = (int) a_X;
      old_ay = (int) a_Y;
      old_bx = (int) b_X;
      old_by = (int) b_Y;
      clust_a = clust_b = 0;
      for (i = 0; i < M_SIZE + 1; i++)
        {
          float dist1, dist2;

          dist1 = SQUARE (a_X - center[i].x) + SQUARE (a_Y - center[i].y);
          dist2 = SQUARE (b_X - center[i].x) + SQUARE (b_Y - center[i].y);
          if (dist1 * (clust_a + M_SIZE / 2) < dist2 * (clust_b + M_SIZE / 2))
            {
              belong[i] = true;
              clust_a++;
            }
          else
            {
              belong[i] = false;
              clust_b++;
            }
        }
      /* kludge to fix degenerate cases */
      if (clust_a == M_SIZE + 1)
        belong[M_SIZE / 2] = false;
      else if (clust_b == M_SIZE + 1)
        belong[M_SIZE / 2] = true;
      /* compute new center of gravity of clusters */
      total_a = total_b = 0;
      a_X = a_Y = b_X = b_Y = 0;
      for (i = 0; i < M_SIZE + 1; i++)
        {
          if (belong[i])
            {
              a_X += center[i].x * center[i].area;
              a_Y += center[i].y * center[i].area;
              total_a += center[i].area;
            }
          else
            {
              b_X += center[i].x * center[i].area;
              b_Y += center[i].y * center[i].area;
              total_b += center[i].area;
            }
        }
      a_X /= total_a;
      a_Y /= total_a;
      b_X /= total_b;
      b_Y /= total_b;
      if (old_ax == (int) a_X && old_ay == (int) a_Y &&
          old_bx == (int) b_X && old_by == (int) b_Y)
        break;
    }
  /* Now 'belong' has the partition map */
  new_node = (struct rtree_node *) calloc (1, sizeof (*new_node));
  new_node->parent = node->parent;
  new_node->flags.is_leaf = node->flags.is_leaf;
  clust_a = clust_b = 0;
  if (node->flags.is_leaf)
    {
      int flag, a_flag, b_flag;
      flag = a_flag = b_flag = 1;
      for (i = 0; i < M_SIZE + 1; i++)
        {
          if (belong[i])
            {
              node->u.rects[clust_a++] = node->u.rects[i];
              if (node->flags.manage & flag)
                a_manage |= a_flag;
              a_flag <<= 1;
            }
          else
            {
              new_node->u.rects[clust_b++] = node->u.rects[i];
              if (node->flags.manage & flag)
                b_manage |= b_flag;
              b_flag <<= 1;
            }
          flag <<= 1;
        }
    }
  else
    {
      for (i = 0; i < M_SIZE + 1; i++)
        {
          if (belong[i])
            node->u.kids[clust_a++] = node->u.kids[i];
          else
            {
              node->u.kids[i]->parent = new_node;
              new_node->u.kids[clust_b++] = node->u.kids[i];
            }
        }
    }
  node->flags.manage = a_manage;
  new_node->flags.manage = b_manage;
  assert (clust_a != 0);
  assert (clust_b != 0);
  if (node->flags.is_leaf)
    for (; clust_a < M_SIZE + 1; clust_a++)
      node->u.rects[clust_a].bptr = NULL;
  else
    for (; clust_a < M_SIZE + 1; clust_a++)
      node->u.kids[clust_a] = NULL;
  adjust_bounds (node);
  sort_node (node);
  adjust_bounds (new_node);
  sort_node (new_node);
  return (new_node);
}

/* split a node according to clusters
 */
static void
split_node (struct rtree_node *node)
{
  int i;
  struct rtree_node *new_node;

  assert (node);
  assert (node->flags.is_leaf ? (void *) node->u.rects[M_SIZE].bptr
                              : (void *) node->u.kids[M_SIZE]);
  new_node = find_clusters (node);
  if (node->parent == NULL)     /* split root node */
    {
      struct rtree_node *second;

      second = (struct rtree_node *) calloc (1, sizeof (*second));
      *second = *node;
      if (!second->flags.is_leaf)
        for (i = 0; i < M_SIZE; i++)
          if (second->u.kids[i])
            second->u.kids[i]->parent = second;
      node->flags.is_leaf = 0;
      node->flags.manage = 0;
      second->parent = new_node->parent = node;
      node->u.kids[0] = new_node;
      node->u.kids[1] = second;
      for (i = 2; i < M_SIZE + 1; i++)
        node->u.kids[i] = NULL;
      adjust_bounds (node);
      sort_node (node);
#ifdef SLOW_ASSERTS
      assert (__r_tree_is_good (node));
#endif
      return;
    }
  for (i = 0; i < M_SIZE; i++)
    if (!node->parent->u.kids[i])
      break;
  node->parent->u.kids[i] = new_node;
#ifdef SLOW_ASSERTS
  assert (__r_node_is_good (node));
  assert (__r_node_is_good (new_node));
#endif
  if (i < M_SIZE)
    {
#ifdef SLOW_ASSERTS
      assert (__r_node_is_good (node->parent));
#endif
      sort_node (node->parent);
      return;
    }
  split_node (node->parent);
}

static inline int
contained (struct rtree_node *node, const BoxType * query)
{
  if (node->box.X1 > query->X1 || node->box.X2 < query->X2 ||
      node->box.Y1 > query->Y1 || node->box.Y2 < query->Y2)
    return 0;
  return 1;
}

static inline double
penalty (struct rtree_node *node, const BoxType * query)
{
  double score;

  /* Compute the area penalty for inserting here and return.
   * The penalty is the increase in area necessary
   * to include the query.
   */
  score = (MAX (node->box.X2, query->X2) - MIN (node->box.X1, query->X1));
  score *= (MAX (node->box.Y2, query->Y2) - MIN (node->box.Y1, query->Y1));
  score -=
    (double) (node->box.X2 - node->box.X1) *
    (double) (node->box.Y2 - node->box.Y1);
  return score;
}
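
/* Worked example (illustrative numbers only): growing a node whose box is
 * (0,0)-(10,10), area 100, to also cover a query box at (12,0)-(14,10)
 * needs a 14 x 10 = 140 bounding box, so the penalty is 140 - 100 = 40.
 * When no child already contains the query, __r_insert_node () below
 * descends into the child with the smallest such penalty.
 */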

static void
__r_insert_node (struct rtree_node *node, const BoxType * query,
                 int manage, bool force)
{
#ifdef SLOW_ASSERTS
  assert (__r_node_is_good (node));
#endif
  /* Ok, at this point we must already enclose the query or we're forcing
   * this node to expand to enclose it, so if we're a leaf, simply store
   * the query here
   */

  if (node->flags.is_leaf)
    {
      register int i;

      if (UNLIKELY (manage))
        {
          register int flag = 1;

          for (i = 0; i < M_SIZE; i++)
            {
              if (!node->u.rects[i].bptr)
                break;
              flag <<= 1;
            }
          node->flags.manage |= flag;
        }
      else
        {
          for (i = 0; i < M_SIZE; i++)
            if (!node->u.rects[i].bptr)
              break;
        }
      /* the node always has an extra space available */
      node->u.rects[i].bptr = query;
      node->u.rects[i].bounds = *query;
      /* first entry in node determines initial bounding box */
      if (i == 0)
        node->box = *query;
      else if (force)
        {
          MAKEMIN (node->box.X1, query->X1);
          MAKEMAX (node->box.X2, query->X2);
          MAKEMIN (node->box.Y1, query->Y1);
          MAKEMAX (node->box.Y2, query->Y2);
        }
      if (i < M_SIZE)
        {
          sort_node (node);
          return;
        }
      /* we must split the node */
      split_node (node);
      return;
    }
  else
    {
      int i;
      struct rtree_node *best_node;
      double score, best_score;

      if (force)
        {
          MAKEMIN (node->box.X1, query->X1);
          MAKEMAX (node->box.X2, query->X2);
          MAKEMIN (node->box.Y1, query->Y1);
          MAKEMAX (node->box.Y2, query->Y2);
        }

      /* this node encloses it, but it's not a leaf, so descend the tree */

      /* First check if any children actually encloses it */
      assert (node->u.kids[0]);
      for (i = 0; i < M_SIZE; i++)
        {
          if (!node->u.kids[i])
            break;
          if (contained (node->u.kids[i], query))
            {
              __r_insert_node (node->u.kids[i], query, manage, false);
              sort_node (node);
              return;
            }
        }
      /* see if there is room for a new leaf node */
      if (node->u.kids[0]->flags.is_leaf && i < M_SIZE)
        {
          struct rtree_node *new_node;
          new_node = (struct rtree_node *) calloc (1, sizeof (*new_node));
          new_node->parent = node;
          new_node->flags.is_leaf = true;
          node->u.kids[i] = new_node;
          new_node->u.rects[0].bptr = query;
          new_node->u.rects[0].bounds = *query;
          new_node->box = *query;
          if (UNLIKELY (manage))
            new_node->flags.manage = 1;
          sort_node (node);
          return;
        }

      /* Ok, so we're still here - look for the best child to push it into */
      best_score = penalty (node->u.kids[0], query);
      best_node = node->u.kids[0];
      for (i = 1; i < M_SIZE; i++)
        {
          if (!node->u.kids[i])
            break;
          score = penalty (node->u.kids[i], query);
          if (score < best_score)
            {
              best_score = score;
              best_node = node->u.kids[i];
            }
        }
      __r_insert_node (best_node, query, manage, true);
      sort_node (node);
      return;
    }
}

void
r_insert_entry (rtree_t * rtree, const BoxType * which, int man)
{
  assert (which);
  assert (which->X1 <= which->X2);
  assert (which->Y1 <= which->Y2);
  /* recursively search the tree for the best leaf node */
  assert (rtree->root);
  __r_insert_node (rtree->root, which, man,
                   rtree->root->box.X1 > which->X1
                   || rtree->root->box.X2 < which->X2
                   || rtree->root->box.Y1 > which->Y1
                   || rtree->root->box.Y2 < which->Y2);
  rtree->size++;
}

bool
__r_delete (struct rtree_node *node, const BoxType * query)
{
  int i, flag, mask, a;

  /* the tree might be inconsistent during delete */
  if (query->X1 < node->box.X1 || query->Y1 < node->box.Y1
      || query->X2 > node->box.X2 || query->Y2 > node->box.Y2)
    return false;
  if (!node->flags.is_leaf)
    {
      for (i = 0; i < M_SIZE; i++)
        {
          /* if this is us being removed, free and copy over */
          if (node->u.kids[i] == (struct rtree_node *) query)
            {
              free ((void *) query);
              for (; i < M_SIZE; i++)
                {
                  node->u.kids[i] = node->u.kids[i + 1];
                  if (!node->u.kids[i])
                    break;
                }
              /* nobody home here now ? */
              if (!node->u.kids[0])
                {
                  if (!node->parent)
                    {
                      /* wow, the root is empty! */
                      node->flags.is_leaf = 1;
                      /* changing type of node, be sure it's all zero */
                      for (i = 1; i < M_SIZE + 1; i++)
                        node->u.rects[i].bptr = NULL;
                      return true;
                    }
                  return (__r_delete (node->parent, &node->box));
                }
              else
                /* propagate boundary adjust upward */
                while (node)
                  {
                    adjust_bounds (node);
                    node = node->parent;
                  }
              return true;
            }
          if (node->u.kids[i])
            {
              if (__r_delete (node->u.kids[i], query))
                return true;
            }
          else
            break;
        }
      return false;
    }
  /* leaf node here */
  mask = 0;
  a = 1;
  for (i = 0; i < M_SIZE; i++)
    {
#ifdef DELETE_BY_POINTER
      if (!node->u.rects[i].bptr || node->u.rects[i].bptr == query)
#else
      if (node->u.rects[i].bounds.X1 == query->X1 &&
          node->u.rects[i].bounds.X2 == query->X2 &&
          node->u.rects[i].bounds.Y1 == query->Y1 &&
          node->u.rects[i].bounds.Y2 == query->Y2)
#endif
        break;
      mask |= a;
      a <<= 1;
    }
  if (!node->u.rects[i].bptr)
    return false;               /* not at this leaf */
  if (node->flags.manage & a)
    {
      free ((void *) node->u.rects[i].bptr);
      node->u.rects[i].bptr = NULL;
    }
  /* squeeze the manage flags together */
  flag = node->flags.manage & mask;
  mask = (~mask) << 1;
  node->flags.manage = flag | ((node->flags.manage & mask) >> 1);
  /* remove the entry */
  for (; i < M_SIZE; i++)
    {
      node->u.rects[i] = node->u.rects[i + 1];
      if (!node->u.rects[i].bptr)
        break;
    }
  if (!node->u.rects[0].bptr)
    {
      if (node->parent)
        __r_delete (node->parent, &node->box);
      return true;
    }
  else
    /* propagate boundary adjustment upward */
    while (node)
      {
        adjust_bounds (node);
        node = node->parent;
      }
  return true;
}

bool
r_delete_entry (rtree_t * rtree, const BoxType * box)
{
  bool r;

  assert (box);
  assert (rtree);
  r = __r_delete (rtree->root, box);
  if (r)
    rtree->size--;
#ifdef SLOW_ASSERTS
  assert (__r_tree_is_good (rtree->root));
#endif
  return r;
}
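
/* Usage note (illustrative only): because DELETE_BY_POINTER is defined
 * above, deletion matches entries by pointer identity, so the caller must
 * pass the very same BoxType pointer that was given to r_insert_entry ().
 * 'tree' and 'box_ptr' are hypothetical names:
 *
 *   r_insert_entry (tree, box_ptr, 0);
 *   ...
 *   r_delete_entry (tree, box_ptr);   // same pointer, not just equal coords
 *
 * If the entry was inserted with manage set, __r_delete () frees the box
 * as part of the removal.
 */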