Prune in a single pass to speed up. Add a playout threshold to limit pruning time.
[pachi/derm.git] / uct / tree.c
#include <assert.h>
#include <math.h>
#include <pthread.h>	/* pthread_create() etc., used by tree_done_node_detached() below */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define DEBUG
#include "board.h"
#include "debug.h"
#include "engine.h"
#include "move.h"
#include "playout.h"
#include "tactics.h"
#include "timeinfo.h"
#include "uct/internal.h"
#include "uct/prior.h"
#include "uct/tree.h"


/* Allocate one node in the fast_alloc mode. The returned node
 * is _not_ initialized. Returns NULL if not enough memory.
 * This function may be called by multiple threads in parallel. */
static struct tree_node *
tree_fast_alloc_node(struct tree *t)
{
	assert(t->nodes != NULL);
	struct tree_node *n = NULL;
	unsigned long old_size = __sync_fetch_and_add(&t->nodes_size, sizeof(*n));

	/* The test below works even if max_tree_size is not a
	 * multiple of the node size because tree_init() allocates
	 * space for an extra node. */
	if (old_size < t->max_tree_size)
		n = (struct tree_node *)(t->nodes + old_size);
	return n;
}
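
/* Note: this is a simple lock-free bump allocator over the pre-allocated
 * t->nodes arena: the atomic fetch-and-add hands each caller a distinct
 * offset, so concurrent expanders never receive the same slot. Individual
 * nodes are never freed in this mode; space is reclaimed wholesale by
 * tree_garbage_collect(), or when the whole tree is destroyed. */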

/* Allocate and initialize a node. Returns NULL (fast_alloc mode)
 * or exits the main program if not enough memory.
 * This function may be called by multiple threads in parallel. */
static struct tree_node *
tree_init_node(struct tree *t, coord_t coord, int depth)
{
	struct tree_node *n;
	if (t->nodes) {
		n = tree_fast_alloc_node(t);
		if (!n) return n;
		memset(n, 0, sizeof(*n));
	} else {
		n = calloc(1, sizeof(*n));
		if (!n) {
			fprintf(stderr, "tree_init_node(): OUT OF MEMORY\n");
			exit(1);
		}
		__sync_fetch_and_add(&t->nodes_size, sizeof(*n));
	}
	n->coord = coord;
	n->depth = depth;
	volatile static long c = 1000000;
	n->hash = __sync_fetch_and_add(&c, 1);
	if (depth > t->max_depth)
		t->max_depth = depth;
	return n;
}

/* Create a tree structure. Pre-allocate all nodes if max_tree_size is > 0. */
struct tree *
tree_init(struct board *board, enum stone color, unsigned long max_tree_size)
{
	struct tree *t = calloc(1, sizeof(*t));
	t->board = board;
	t->max_tree_size = max_tree_size;
	if (max_tree_size != 0) {
		/* Allocate one extra node, max_tree_size may not be multiple of node size. */
		t->nodes = malloc(max_tree_size + sizeof(struct tree_node));
		/* The nodes buffer doesn't need initialization. This is currently
		 * done by tree_init_node to spread the load. Doing a memset for the
		 * entire buffer here would be too slow for large trees (>10 GB). */
		if (!t->nodes) {
			fprintf(stderr, "tree_init(): OUT OF MEMORY\n");
			exit(1);
		}
	}
	/* The root PASS move is only virtual, we never play it. */
	t->root = tree_init_node(t, pass, 0);
	t->root_symmetry = board->symmetry;
	t->root_color = stone_other(color); // to research black moves, root will be white
	return t;
}


/* This function may be called by multiple threads in parallel on the
 * same tree, but not on node n. n may be detached from the tree but
 * must have been created in this tree originally.
 * It returns the remaining size of the tree after n has been freed. */
static unsigned long
tree_done_node(struct tree *t, struct tree_node *n)
{
	struct tree_node *ni = n->children;
	while (ni) {
		struct tree_node *nj = ni->sibling;
		tree_done_node(t, ni);
		ni = nj;
	}
	free(n);
	unsigned long old_size = __sync_fetch_and_sub(&t->nodes_size, sizeof(*n));
	return old_size - sizeof(*n);
}

struct subtree_ctx {
	struct tree *t;
	struct tree_node *n;
};

/* Worker thread for tree_done_node_detached(). Only for fast_alloc=false. */
static void *
tree_done_node_worker(void *ctx_)
{
	struct subtree_ctx *ctx = ctx_;
	char *str = coord2str(ctx->n->coord, ctx->t->board);

	unsigned long tree_size = tree_done_node(ctx->t, ctx->n);
	if (!tree_size)
		free(ctx->t);
	if (DEBUGL(2))
		fprintf(stderr, "done freeing node at %s, tree size %lu\n", str, tree_size);
	free(str);
	free(ctx);
	return NULL;
}

/* Asynchronously free the subtree of nodes rooted at n. If the tree becomes
 * empty free the tree also. Only for fast_alloc=false. */
static void
tree_done_node_detached(struct tree *t, struct tree_node *n)
{
	if (n->u.playouts < 1000) { // no thread for small tree
		if (!tree_done_node(t, n))
			free(t);
		return;
	}
	pthread_attr_t attr;
	pthread_attr_init(&attr);
	pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

	pthread_t thread;
	struct subtree_ctx *ctx = malloc(sizeof(struct subtree_ctx));
	if (!ctx) {
		fprintf(stderr, "tree_done_node_detached(): OUT OF MEMORY\n");
		exit(1);
	}
	ctx->t = t;
	ctx->n = n;
	pthread_create(&thread, &attr, tree_done_node_worker, ctx);
	pthread_attr_destroy(&attr);
}

void
tree_done(struct tree *t)
{
	if (t->chchvals) free(t->chchvals);
	if (t->chvals) free(t->chvals);
	if (t->nodes) {
		free(t->nodes);
		free(t);
	} else if (!tree_done_node(t, t->root)) {
		free(t);
		/* A tree_done_node_worker might still be running on this tree but
		 * it will free the tree later. It is also freeing nodes faster than
		 * we will create new ones. */
	}
}


static void
tree_node_dump(struct tree *tree, struct tree_node *node, int l, int thres)
{
	for (int i = 0; i < l; i++) fputc(' ', stderr);
	int children = 0;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		children++;
	/* We use 1 as parity, since for all nodes we want to know the
	 * win probability of _us_, not the node color. */
	fprintf(stderr, "[%s] %f %% %d [prior %f %% %d amaf %f %% %d]; hints %x; %d children <%"PRIhash">\n",
		coord2sstr(node->coord, tree->board),
		tree_node_get_value(tree, 1, node->u.value), node->u.playouts,
		tree_node_get_value(tree, 1, node->prior.value), node->prior.playouts,
		tree_node_get_value(tree, 1, node->amaf.value), node->amaf.playouts,
		node->hints, children, node->hash);

	/* Print nodes sorted by #playouts. */

	struct tree_node *nbox[1000]; int nboxl = 0;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		if (ni->u.playouts > thres)
			nbox[nboxl++] = ni;

	while (true) {
		int best = -1;
		for (int i = 0; i < nboxl; i++)
			if (nbox[i] && (best < 0 || nbox[i]->u.playouts > nbox[best]->u.playouts))
				best = i;
		if (best < 0)
			break;
		tree_node_dump(tree, nbox[best], l + 1, /* node->u.value < 0.1 ? 0 : */ thres);
		nbox[best] = NULL;
	}
}

void
tree_dump_chval(struct tree *tree, struct move_stats *v)
{
	for (int y = board_size(tree->board) - 2; y > 1; y--) {
		for (int x = 1; x < board_size(tree->board) - 1; x++) {
			coord_t c = coord_xy(tree->board, x, y);
			fprintf(stderr, "%.2f%%%05d ", v[c].value, v[c].playouts);
		}
		fprintf(stderr, "\n");
	}
}

void
tree_dump(struct tree *tree, int thres)
{
	if (thres && tree->root->u.playouts / thres > 100) {
		/* Be a bit sensible about this; the opening book can create
		 * huge dumps at first. */
		thres = tree->root->u.playouts / 100 * (thres < 1000 ? 1 : thres / 1000);
	}
	fprintf(stderr, "(UCT tree; root %s; extra komi %f)\n",
		stone2str(tree->root_color), tree->extra_komi);
	tree_node_dump(tree, tree->root, 0, thres);

	if (DEBUGL(3) && tree->chvals) {
		fprintf(stderr, "children stats:\n");
		tree_dump_chval(tree, tree->chvals);
		fprintf(stderr, "grandchildren stats:\n");
		tree_dump_chval(tree, tree->chchvals);
	}
}


static char *
tree_book_name(struct board *b)
{
	static char buf[256];
	if (b->handicap > 0) {
		sprintf(buf, "uctbook-%d-%02.01f-h%d.pachitree", b->size - 2, b->komi, b->handicap);
	} else {
		sprintf(buf, "uctbook-%d-%02.01f.pachitree", b->size - 2, b->komi);
	}
	return buf;
}
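
/* For illustration: on a 19x19 board (b->size includes the border, hence
 * b->size - 2) with komi 7.5 and no handicap this produces a name like
 * "uctbook-19-7.5.pachitree"; with two handicap stones it would look like
 * "uctbook-19-0.5-h2.pachitree" (komi values given as examples only). */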

static void
tree_node_save(FILE *f, struct tree_node *node, int thres)
{
	bool save_children = node->u.playouts >= thres;

	if (!save_children)
		node->is_expanded = 0;

	fputc(1, f);
	fwrite(((void *) node) + offsetof(struct tree_node, depth),
	       sizeof(struct tree_node) - offsetof(struct tree_node, depth),
	       1, f);

	if (save_children) {
		for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
			tree_node_save(f, ni, thres);
	} else {
		if (node->children)
			node->is_expanded = 1;
	}

	fputc(0, f);
}
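
/* The resulting book format is thus a pre-order dump with sentinel bytes:
 * each node is written as a 1 byte, then the raw struct tree_node contents
 * from the depth member onward, then its saved children recursively, and
 * finally a 0 byte closing the child list. tree_node_load() below mirrors
 * this layout. */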

void
tree_save(struct tree *tree, struct board *b, int thres)
{
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "wb");
	if (!f) {
		perror("fopen");
		return;
	}
	tree_node_save(f, tree->root, thres);
	fputc(0, f);
	fclose(f);
}


void
tree_node_load(FILE *f, struct tree_node *node, int *num)
{
	(*num)++;

	fread(((void *) node) + offsetof(struct tree_node, depth),
	      sizeof(struct tree_node) - offsetof(struct tree_node, depth),
	      1, f);

	/* Keep values in sane scale, otherwise we start overflowing. */
#define MAX_PLAYOUTS	10000000
	if (node->u.playouts > MAX_PLAYOUTS) {
		node->u.playouts = MAX_PLAYOUTS;
	}
	if (node->amaf.playouts > MAX_PLAYOUTS) {
		node->amaf.playouts = MAX_PLAYOUTS;
	}
	memcpy(&node->pamaf, &node->amaf, sizeof(node->amaf));
	memcpy(&node->pu, &node->u, sizeof(node->u));

	struct tree_node *ni = NULL, *ni_prev = NULL;
	while (fgetc(f)) {
		ni_prev = ni; ni = calloc(1, sizeof(*ni));
		if (!node->children)
			node->children = ni;
		else
			ni_prev->sibling = ni;
		ni->parent = node;
		tree_node_load(f, ni, num);
	}
}

void
tree_load(struct tree *tree, struct board *b)
{
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "rb");
	if (!f)
		return;

	fprintf(stderr, "Loading opening book %s...\n", filename);

	int num = 0;
	if (fgetc(f))
		tree_node_load(f, tree->root, &num);
	fprintf(stderr, "Loaded %d nodes.\n", num);

	fclose(f);
}


static struct tree_node *
tree_node_copy(struct tree_node *node)
{
	struct tree_node *n2 = malloc(sizeof(*n2));
	*n2 = *node;
	if (!node->children)
		return n2;
	struct tree_node *ni = node->children;
	struct tree_node *ni2 = tree_node_copy(ni);
	n2->children = ni2; ni2->parent = n2;
	while ((ni = ni->sibling)) {
		ni2->sibling = tree_node_copy(ni);
		ni2 = ni2->sibling; ni2->parent = n2;
	}
	return n2;
}

struct tree *
tree_copy(struct tree *tree)
{
	assert(!tree->nodes);
	struct tree *t2 = malloc(sizeof(*t2));
	*t2 = *tree;
	t2->root = tree_node_copy(tree->root);
	return t2;
}

/* Copy the subtree rooted at node: all nodes at or below depth
 * or with at least threshold playouts. Only for fast_alloc.
 * The code is destructive on src, and the order of nodes is changed.
 * Returns the copy of node in the destination tree, or NULL
 * if we could not copy it. */
static struct tree_node *
tree_prune(struct tree *dest, struct tree *src, struct tree_node *node,
	   int threshold, int depth)
{
	assert(dest->nodes && node);
	struct tree_node *n2 = tree_fast_alloc_node(dest);
	assert(n2);
	*n2 = *node;
	if (n2->depth > dest->max_depth)
		dest->max_depth = n2->depth;
	n2->children = NULL;
	n2->is_expanded = false;

	if (node->depth >= depth && node->u.playouts < threshold)
		return n2;
	/* For deep nodes with many playouts, we must copy all children,
	 * even those with zero playouts, because partially expanded
	 * nodes are not supported. Considering them as fully expanded
	 * would degrade the playing strength. The only exception is
	 * when dest becomes full, but this should never happen in practice
	 * if threshold is chosen to limit the number of nodes traversed. */
	struct tree_node *ni = node->children;
	while (ni && unlikely(dest->nodes_size < dest->max_tree_size)) {
		struct tree_node *ni2 = tree_prune(dest, src, ni, threshold, depth);
		assert(ni2);
		ni2->sibling = n2->children;
		n2->children = ni2;
		ni2->parent = n2;
		ni = ni->sibling;
	}
	if (n2->children && !ni) {
		n2->is_expanded = true;
	} else {
		n2->children = NULL; // avoid partially expanded nodes
	}
	return n2;
}
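
/* Note that the copy and the pruning decision happen in a single traversal
 * of the source tree; surviving children are simply prepended to the new
 * child list, which is why "the order of nodes is changed" as stated above:
 * each copied sibling list comes out reversed. */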

/* The following constants are used for garbage collection of nodes.
 * A tree is considered large if the top node has >= 40K playouts.
 * For such trees, we copy deep nodes only if they have >= 40 playouts.
 * These constants define how much time we're willing to spend
 * scanning the source tree when promoting a move. The values 40K
 * and 40 make worst-case pruning take about 3 s with 20 GB of RAM, and
 * this only matters for long thinking times (>1M playouts). For fast
 * games the trees don't grow large. For small RAM or fast games we copy
 * the entire tree. These values do not degrade playing strength and are
 * necessary to avoid losing on time; increasing MIN_DEEP_PLAYOUTS
 * or decreasing LARGE_TREE_PLAYOUTS would make the program faster but
 * play worse. */
#define LARGE_TREE_PLAYOUTS 40000
#define MIN_DEEP_PLAYOUTS 40
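
/* Concretely (see tree_garbage_collect() below): while the root is under
 * LARGE_TREE_PLAYOUTS playouts, tree_prune() is called with threshold 0, so
 * the playout test never cuts anything and the whole subtree is copied;
 * once the root exceeds 40K playouts, nodes deeper than the computed
 * max_depth are kept only if they have at least MIN_DEEP_PLAYOUTS playouts. */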

/* Free the whole tree, keeping only the subtree rooted at node.
 * Prune the subtree if necessary to fit in max_size bytes or
 * to save time scanning the tree.
 * Returns the moved node. Only for fast_alloc. */
static struct tree_node *
tree_garbage_collect(struct tree *tree, unsigned long max_size, struct tree_node *node)
{
	assert(tree->nodes && !node->parent && !node->sibling);
	double start_time = time_now();

	struct tree *temp_tree = tree_init(tree->board, tree->root_color, max_size);
	temp_tree->nodes_size = 0; // We do not want the dummy pass node
	struct tree_node *temp_node;

	/* Find the maximum depth at which we can copy all nodes. */
	int max_nodes = 1;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		max_nodes++;
	unsigned long nodes_size = max_nodes * sizeof(*node);
	int max_depth = node->depth;
	while (nodes_size < max_size && max_nodes > 1) {
		max_nodes--;
		nodes_size += max_nodes * nodes_size;
		max_depth++;
	}

	/* Copy all nodes for small trees. For large trees, copy all nodes
	 * with depth <= max_depth, and all nodes with at least MIN_DEEP_PLAYOUTS.
	 * Avoiding going too deep (except for nodes with many playouts) is mostly
	 * to save time scanning the source tree. It can take over 20 s to completely
	 * traverse a large source tree (20 GB) even without copying, because
	 * the traversal is not at all friendly to the memory cache. */
	if (node->u.playouts < LARGE_TREE_PLAYOUTS) {
		temp_node = tree_prune(temp_tree, tree, node, 0, max_depth + 20);
	} else {
		temp_node = tree_prune(temp_tree, tree, node, MIN_DEEP_PLAYOUTS, max_depth);
	}
	assert(temp_node);

	/* Now copy back to the original tree. */
	tree->nodes_size = 0;
	tree->max_depth = 0;
	struct tree_node *new_node = tree_prune(tree, temp_tree, temp_node, 0, temp_tree->max_depth);

	if (DEBUGL(1)) {
		double now = time_now();
		static double prev_time;
		if (!prev_time) prev_time = start_time;
		fprintf(stderr,
			"tree pruned in %0.6g s, prev %0.3g s ago, dest depth %d wanted %d,"
			" max_size %lu, pruned size %lu, playouts %d\n",
			now - start_time, start_time - prev_time, temp_tree->max_depth, max_depth,
			max_size, temp_tree->nodes_size, new_node->u.playouts);
		prev_time = start_time;
	}
	assert(tree->nodes_size == temp_tree->nodes_size);
	assert(tree->max_depth == temp_tree->max_depth);
	tree_done(temp_tree);
	return new_node;
}
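
/* Design note: the pruning goes through a temporary tree and back rather
 * than in place because the destination bump allocator starts writing at
 * offset 0 of the arena; pruning directly into the source arena would
 * overwrite nodes that have not been scanned yet. The copy-back pass also
 * leaves the surviving subtree compacted at the start of the buffer. */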


static void
tree_node_merge(struct tree_node *dest, struct tree_node *src)
{
	/* Do not merge nodes that weren't touched at all. */
	assert(dest->pamaf.playouts == src->pamaf.playouts);
	assert(dest->pu.playouts == src->pu.playouts);
	if (src->amaf.playouts - src->pamaf.playouts == 0
	    && src->u.playouts - src->pu.playouts == 0) {
		return;
	}

	dest->hints |= src->hints;

	/* Merge the children, both are coord-sorted lists. */
	struct tree_node *di = dest->children, **dref = &dest->children;
	struct tree_node *si = src->children, **sref = &src->children;
	while (di && si) {
		if (di->coord != si->coord) {
			/* src has some extra items or misses di */
			struct tree_node *si2 = si->sibling;
			while (si2 && di->coord != si2->coord) {
				si2 = si2->sibling;
			}
			if (!si2)
				goto next_di; /* src misses di, move on */
			/* chain the extra [si,si2) items before di */
			(*dref) = si;
			while (si->sibling != si2) {
				si->parent = dest;
				si = si->sibling;
			}
			si->parent = dest;
			si->sibling = di;
			si = si2;
			(*sref) = si;
		}
		/* Matching nodes - recurse... */
		tree_node_merge(di, si);
		/* ...and move on. */
		sref = &si->sibling; si = si->sibling;
next_di:
		dref = &di->sibling; di = di->sibling;
	}
	if (si) {
		/* Some outstanding nodes are left on src side, rechain
		 * them to dst. */
		(*dref) = si;
		while (si) {
			si->parent = dest;
			si = si->sibling;
		}
		(*sref) = NULL;
	}

	/* Priors should be constant. */
	assert(dest->prior.playouts == src->prior.playouts && dest->prior.value == src->prior.value);

	stats_merge(&dest->amaf, &src->amaf);
	stats_merge(&dest->u, &src->u);
}

/* Merge two trees built upon the same board. Note that the operation is
 * destructive on src. */
void
tree_merge(struct tree *dest, struct tree *src)
{
	/* Not suitable for fast_alloc which reorders children. */
	assert(!dest->nodes);

	if (src->max_depth > dest->max_depth)
		dest->max_depth = src->max_depth;
	tree_node_merge(dest->root, src->root);
}


static void
tree_node_normalize(struct tree_node *node, int factor)
{
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		tree_node_normalize(ni, factor);

#define normalize(s1, s2, t) node->s2.t = node->s1.t + (node->s2.t - node->s1.t) / factor;
	normalize(pamaf, amaf, playouts);
	memcpy(&node->pamaf, &node->amaf, sizeof(node->amaf));

	normalize(pu, u, playouts);
	memcpy(&node->pu, &node->u, sizeof(node->u));
#undef normalize
}

/* Normalize a tree, dividing the newly accumulated amaf and u playouts by
 * the given factor; this is used after merging trees built upon the same
 * board by simulations run in independent threads, so the merged counts are
 * not inflated by the number of threads. To correctly handle results taken
 * from the previous simulation run, they are backed up in the tree (the
 * pamaf and pu fields). */
void
tree_normalize(struct tree *tree, int factor)
{
	tree_node_normalize(tree->root, factor);
}
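
/* Worked example of the normalize() macro above: if a node had pu.playouts
 * == 100 backed up from the previous run and three threads each added 20
 * playouts before the merge (u.playouts == 160 after tree_merge()), then
 * with factor == 3 the node ends up with 100 + (160 - 100) / 3 == 120
 * playouts, i.e. only the newly gained increment is scaled down. */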


/* Tree symmetry: When possible, we will localize the tree to a single part
 * of the board in tree_expand_node() and possibly flip along symmetry axes
 * to another part of the board in tree_promote_at(). We follow b->symmetry
 * guidelines here. */


/* This function must be thread safe, given that board b is only modified by the calling thread. */
void
tree_expand_node(struct tree *t, struct tree_node *node, struct board *b, enum stone color, struct uct *u, int parity)
{
	/* Get a Common Fate Graph distance map from parent node. */
	int distances[board_size2(b)];
	if (!is_pass(b->last_move.coord) && !is_resign(b->last_move.coord)) {
		cfg_distances(b, node->coord, distances, TREE_NODE_D_MAX);
	} else {
		// Pass or resign - everything is too far.
		foreach_point(b) { distances[c] = TREE_NODE_D_MAX + 1; } foreach_point_end;
	}

	/* Get a map of prior values to initialize the new nodes with. */
	struct prior_map map = {
		.b = b,
		.to_play = color,
		.parity = tree_parity(t, parity),
		.distances = distances,
	};
	// Include pass in the prior map.
	struct move_stats map_prior[board_size2(b) + 1]; map.prior = &map_prior[1];
	bool map_consider[board_size2(b) + 1]; map.consider = &map_consider[1];
	memset(map_prior, 0, sizeof(map_prior));
	memset(map_consider, 0, sizeof(map_consider));
	struct move pm = { .color = color };
	map.consider[pass] = true;
	foreach_point(b) {
		if (board_at(b, c) != S_NONE)
			continue;
		pm.coord = c;
		if (!board_is_valid_move(b, &pm))
			continue;
		map.consider[c] = true;
	} foreach_point_end;
	uct_prior(u, node, &map);

	/* Now, create the nodes. */
	struct tree_node *ni = tree_init_node(t, pass, node->depth + 1);
	/* In fast_alloc mode we might temporarily run out of nodes but
	 * this should be rare if MIN_FREE_MEM_PERCENT is set correctly. */
	if (!ni) {
		node->is_expanded = false;
		return;
	}
	struct tree_node *first_child = ni;
	ni->parent = node;
	ni->prior = map.prior[pass]; ni->d = TREE_NODE_D_MAX + 1;

	/* The loop considers only the symmetry playground. */
	if (UDEBUGL(6)) {
		fprintf(stderr, "expanding %s within [%d,%d],[%d,%d] %d-%d\n",
			coord2sstr(node->coord, b),
			b->symmetry.x1, b->symmetry.y1,
			b->symmetry.x2, b->symmetry.y2,
			b->symmetry.type, b->symmetry.d);
	}
	for (int i = b->symmetry.x1; i <= b->symmetry.x2; i++) {
		for (int j = b->symmetry.y1; j <= b->symmetry.y2; j++) {
			if (b->symmetry.d) {
				int x = b->symmetry.type == SYM_DIAG_DOWN ? board_size(b) - 1 - i : i;
				if (x > j) {
					if (UDEBUGL(7))
						fprintf(stderr, "drop %d,%d\n", i, j);
					continue;
				}
			}

			coord_t c = coord_xy_otf(i, j, t->board);
			if (!map.consider[c]) // Filter out invalid moves
				continue;
			assert(c != node->coord); // I have spotted "C3 C3" in some sequence...

			struct tree_node *nj = tree_init_node(t, c, node->depth + 1);
			if (!nj) {
				node->is_expanded = false;
				return;
			}
			nj->parent = node; ni->sibling = nj; ni = nj;

			ni->prior = map.prior[c];
			ni->d = distances[c];
		}
	}
	node->children = first_child; // must be done at the end to avoid race
}
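
/* Note on thread safety: the freshly created children are linked together
 * while still private to this thread; only the final assignment to
 * node->children publishes the whole list, so concurrent readers observe
 * either no children or the complete, fully initialized list. (This relies
 * on the single pointer store being atomic on the target architecture.) */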


static coord_t
flip_coord(struct board *b, coord_t c,
	   bool flip_horiz, bool flip_vert, int flip_diag)
{
	int x = coord_x(c, b), y = coord_y(c, b);
	if (flip_diag) {
		int z = x; x = y; y = z;
	}
	if (flip_horiz) {
		x = board_size(b) - 1 - x;
	}
	if (flip_vert) {
		y = board_size(b) - 1 - y;
	}
	return coord_xy_otf(x, y, b);
}

static void
tree_fix_node_symmetry(struct board *b, struct tree_node *node,
		       bool flip_horiz, bool flip_vert, int flip_diag)
{
	if (!is_pass(node->coord))
		node->coord = flip_coord(b, node->coord, flip_horiz, flip_vert, flip_diag);

	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		tree_fix_node_symmetry(b, ni, flip_horiz, flip_vert, flip_diag);
}

static void
tree_fix_symmetry(struct tree *tree, struct board *b, coord_t c)
{
	if (is_pass(c))
		return;

	struct board_symmetry *s = &tree->root_symmetry;
	int cx = coord_x(c, b), cy = coord_y(c, b);

	/* playground	X->h->v->d normalization
	 * :::..	.d...
	 * .::..	v....
	 * ..:..	.....
	 * .....	h...X
	 * .....	..... */
	bool flip_horiz = cx < s->x1 || cx > s->x2;
	bool flip_vert = cy < s->y1 || cy > s->y2;

	bool flip_diag = 0;
	if (s->d) {
		bool dir = (s->type == SYM_DIAG_DOWN);
		int x = dir ^ flip_horiz ^ flip_vert ? board_size(b) - 1 - cx : cx;
		if (flip_vert ? x < cy : x > cy) {
			flip_diag = 1;
		}
	}

	if (DEBUGL(4)) {
		fprintf(stderr, "%s [%d,%d -> %d,%d;%d,%d] will flip %d %d %d -> %s, sym %d (%d) -> %d (%d)\n",
			coord2sstr(c, b),
			cx, cy, s->x1, s->y1, s->x2, s->y2,
			flip_horiz, flip_vert, flip_diag,
			coord2sstr(flip_coord(b, c, flip_horiz, flip_vert, flip_diag), b),
			s->type, s->d, b->symmetry.type, b->symmetry.d);
	}
	if (flip_horiz || flip_vert || flip_diag)
		tree_fix_node_symmetry(b, tree->root, flip_horiz, flip_vert, flip_diag);
}


static void
tree_unlink_node(struct tree_node *node)
{
	struct tree_node *ni = node->parent;
	if (ni->children == node) {
		ni->children = node->sibling;
	} else {
		ni = ni->children;
		while (ni->sibling != node)
			ni = ni->sibling;
		ni->sibling = node->sibling;
	}
	node->sibling = NULL;
	node->parent = NULL;
}

/* Promotes the given node as the root of the tree. In the fast_alloc
 * mode, the node may be moved and some of its subtree may be pruned. */
void
tree_promote_node(struct tree *tree, struct tree_node **node)
{
	assert((*node)->parent == tree->root);
	tree_unlink_node(*node);
	if (!tree->nodes) {
		/* Freeing the rest of the tree can take several seconds on large
		 * trees, so we must do it asynchronously: */
		tree_done_node_detached(tree, tree->root);
	} else {
		unsigned long min_free_size = (MIN_FREE_MEM_PERCENT * tree->max_tree_size) / 100;
		if (tree->nodes_size >= tree->max_tree_size - min_free_size)
			*node = tree_garbage_collect(tree, min_free_size, *node);
		/* If we still have enough free memory, we will free everything later. */
	}
	tree->root = *node;
	tree->root_color = stone_other(tree->root_color);
	board_symmetry_update(tree->board, &tree->root_symmetry, (*node)->coord);
	/* If the tree's deepest node was under *node, or if we called tree_garbage_collect,
	 * tree->max_depth is correct. Otherwise we could traverse the tree
	 * to recompute max_depth, but it's not worth it: it's just for debugging
	 * and soon the tree will grow and max_depth will become correct again. */
	if (tree->chchvals) { free(tree->chchvals); tree->chchvals = NULL; }
	if (tree->chvals) { free(tree->chvals); tree->chvals = NULL; }
}

bool
tree_promote_at(struct tree *tree, struct board *b, coord_t c)
{
	tree_fix_symmetry(tree, b, c);

	for (struct tree_node *ni = tree->root->children; ni; ni = ni->sibling) {
		if (ni->coord == c) {
			tree_promote_node(tree, &ni);
			return true;
		}
	}
	return false;
}