[pachi/t.git] / uct / tree.c
#include <assert.h>
#include <math.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define DEBUG
#include "board.h"
#include "debug.h"
#include "engine.h"
#include "move.h"
#include "playout.h"
#include "tactics/util.h"
#include "timeinfo.h"
#include "uct/internal.h"
#include "uct/prior.h"
#include "uct/tree.h"
#include "uct/slave.h"

/* Allocate tree node(s). The returned nodes are _not_ initialized.
 * Returns NULL if not enough memory.
 * This function may be called by multiple threads in parallel. */
static struct tree_node *
tree_alloc_node(struct tree *t, int count, bool fast_alloc, hash_t *hash)
{
	struct tree_node *n = NULL;
	size_t nsize = count * sizeof(*n);
	unsigned long old_size = __sync_fetch_and_add(&t->nodes_size, nsize);

	if (fast_alloc) {
		if (old_size + nsize > t->max_tree_size)
			return NULL;
		assert(t->nodes != NULL);
		n = (struct tree_node *)(t->nodes + old_size);
		memset(n, 0, sizeof(*n));
	} else {
		n = calloc2(count, sizeof(*n));
	}

	if (hash) {
		volatile static long c = 1000000;
		*hash = __sync_fetch_and_add(&c, count);
	}

	return n;
}

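/* The __sync_fetch_and_add above works as a lock-free bump allocator:
 * each caller atomically reserves the byte range [old_size, old_size + nsize)
 * of t->nodes, so parallel callers always get disjoint slices. For
 * illustration (assuming 64-byte nodes, a made-up size), two threads
 * allocating one node each at the same time might see:
 *
 *	thread A: old_size = fetch_add(&nodes_size, 64) == 0	-> bytes 0..63
 *	thread B: old_size = fetch_add(&nodes_size, 64) == 64	-> bytes 64..127
 *
 * On overflow the reservation is not undone; this is harmless since every
 * later reservation starts even higher and fails the max_tree_size check too. */
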
/* Initialize a node at a given place in memory.
 * This function may be called by multiple threads in parallel. */
static void
tree_setup_node(struct tree *t, struct tree_node *n, coord_t coord, int depth, hash_t hash)
{
	n->coord = coord;
	n->depth = depth;
	n->hash = hash;
	if (depth > t->max_depth)
		t->max_depth = depth;
}

/* Allocate and initialize a node. Returns NULL (in fast_alloc mode)
 * or exits the program if there is not enough memory.
 * This function may be called by multiple threads in parallel. */
static struct tree_node *
tree_init_node(struct tree *t, coord_t coord, int depth, bool fast_alloc)
{
	struct tree_node *n;
	hash_t hash;
	n = tree_alloc_node(t, 1, fast_alloc, &hash);
	if (!n) return NULL;
	tree_setup_node(t, n, coord, depth, hash);
	return n;
}

/* Create a tree structure. Pre-allocate all nodes if max_tree_size is > 0. */
struct tree *
tree_init(struct board *board, enum stone color, unsigned long max_tree_size,
	  unsigned long max_pruned_size, unsigned long pruning_threshold, floating_t ltree_aging, int hbits)
{
	struct tree *t = calloc2(1, sizeof(*t));
	t->board = board;
	t->max_tree_size = max_tree_size;
	t->max_pruned_size = max_pruned_size;
	t->pruning_threshold = pruning_threshold;
	if (max_tree_size != 0) {
		t->nodes = malloc2(max_tree_size);
		/* The nodes buffer doesn't need initialization. This is currently
		 * done by tree_init_node to spread the load. Doing a memset for the
		 * entire buffer here would be too slow for large trees (>10 GB). */
	}
	/* The root PASS move is only virtual, we never play it. */
	t->root = tree_init_node(t, pass, 0, t->nodes);
	t->root_symmetry = board->symmetry;
	t->root_color = stone_other(color); // to research black moves, root will be white

	t->ltree_black = tree_init_node(t, pass, 0, false);
	t->ltree_white = tree_init_node(t, pass, 0, false);
	t->ltree_aging = ltree_aging;

	t->hbits = hbits;
	if (hbits) t->htable = uct_htable_alloc(hbits);
	return t;
}

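/* A minimal usage sketch with made-up parameters (the actual sizes and the
 * hash-table setup are decided by the calling engine code):
 *
 *	struct tree *t = tree_init(b, S_BLACK, 200 * 1048576, 100 * 1048576,
 *				   180 * 1048576, 1.0f, 0);
 *	... run playouts, expand and descend nodes ...
 *	tree_done(t);
 */
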
/* This function may be called by multiple threads in parallel on the
 * same tree, but not on node n. n may be detached from the tree but
 * must have been created in this tree originally.
 * It returns the remaining size of the tree after n has been freed. */
static unsigned long
tree_done_node(struct tree *t, struct tree_node *n)
{
	struct tree_node *ni = n->children;
	while (ni) {
		struct tree_node *nj = ni->sibling;
		tree_done_node(t, ni);
		ni = nj;
	}
	free(n);
	unsigned long old_size = __sync_fetch_and_sub(&t->nodes_size, sizeof(*n));
	return old_size - sizeof(*n);
}

struct subtree_ctx {
	struct tree *t;
	struct tree_node *n;
};

/* Worker thread for tree_done_node_detached(). Only for fast_alloc=false. */
static void *
tree_done_node_worker(void *ctx_)
{
	struct subtree_ctx *ctx = ctx_;
	char *str = coord2str(ctx->n->coord, ctx->t->board);

	unsigned long tree_size = tree_done_node(ctx->t, ctx->n);
	if (!tree_size)
		free(ctx->t);
	if (DEBUGL(2))
		fprintf(stderr, "done freeing node at %s, tree size %lu\n", str, tree_size);
	free(str);
	free(ctx);
	return NULL;
}

/* Asynchronously free the subtree of nodes rooted at n. If the tree becomes
 * empty free the tree also. Only for fast_alloc=false. */
static void
tree_done_node_detached(struct tree *t, struct tree_node *n)
{
	if (n->u.playouts < 1000) { // no thread for small tree
		if (!tree_done_node(t, n))
			free(t);
		return;
	}
	pthread_attr_t attr;
	pthread_attr_init(&attr);
	pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

	pthread_t thread;
	struct subtree_ctx *ctx = malloc2(sizeof(struct subtree_ctx));
	ctx->t = t;
	ctx->n = n;
	pthread_create(&thread, &attr, tree_done_node_worker, ctx);
	pthread_attr_destroy(&attr);
}

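/* Note on the detached worker: the thread is created detached because nobody
 * joins it - the ctx and the coordinate string are freed by
 * tree_done_node_worker() itself, and if the worker ends up freeing the last
 * nodes it also frees the tree (cf. the matching comment in tree_done()). */
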
void
tree_done(struct tree *t)
{
	tree_done_node(t, t->ltree_black);
	tree_done_node(t, t->ltree_white);

	if (t->htable) free(t->htable);
	if (t->nodes) {
		free(t->nodes);
		free(t);
	} else if (!tree_done_node(t, t->root)) {
		free(t);
		/* A tree_done_node_worker might still be running on this tree but
		 * it will free the tree later. It is also freeing nodes faster than
		 * we will create new ones. */
	}
}

static void
tree_node_dump(struct tree *tree, struct tree_node *node, int treeparity, int l, int thres)
{
	for (int i = 0; i < l; i++) fputc(' ', stderr);
	int children = 0;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		children++;
	/* We use 1 as parity, since for all nodes we want to know the
	 * win probability of _us_, not the node color. */
	fprintf(stderr, "[%s] %.3f/%d [prior %.3f/%d amaf %.3f/%d crit %.3f] h=%x c#=%d <%"PRIhash">\n",
		coord2sstr(node->coord, tree->board),
		tree_node_get_value(tree, treeparity, node->u.value), node->u.playouts,
		tree_node_get_value(tree, treeparity, node->prior.value), node->prior.playouts,
		tree_node_get_value(tree, treeparity, node->amaf.value), node->amaf.playouts,
		tree_node_criticality(tree, node),
		node->hints, children, node->hash);

	/* Print nodes sorted by #playouts. */

	struct tree_node *nbox[1000]; int nboxl = 0;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		if (ni->u.playouts > thres)
			nbox[nboxl++] = ni;

	while (true) {
		int best = -1;
		for (int i = 0; i < nboxl; i++)
			if (nbox[i] && (best < 0 || nbox[i]->u.playouts > nbox[best]->u.playouts))
				best = i;
		if (best < 0)
			break;
		tree_node_dump(tree, nbox[best], treeparity, l + 1, /* node->u.value < 0.1 ? 0 : */ thres);
		nbox[best] = NULL;
	}
}

void
tree_dump(struct tree *tree, int thres)
{
	if (thres && tree->root->u.playouts / thres > 100) {
		/* Be a bit sensible about this; the opening tbook can create
		 * huge dumps at first. */
		thres = tree->root->u.playouts / 100 * (thres < 1000 ? 1 : thres / 1000);
	}
	fprintf(stderr, "(UCT tree; root %s; extra komi %f; max depth %d)\n",
		stone2str(tree->root_color), tree->extra_komi,
		tree->max_depth - tree->root->depth);
	tree_node_dump(tree, tree->root, 1, 0, thres);

	if (DEBUGL(3) && tree->ltree_black) {
		fprintf(stderr, "B local tree:\n");
		tree_node_dump(tree, tree->ltree_black, tree->root_color == S_WHITE ? 1 : -1, 0, thres);
		fprintf(stderr, "W local tree:\n");
		tree_node_dump(tree, tree->ltree_white, tree->root_color == S_BLACK ? 1 : -1, 0, thres);
	}
}

static char *
tree_book_name(struct board *b)
{
	static char buf[256];
	if (b->handicap > 0) {
		sprintf(buf, "ucttbook-%d-%02.01f-h%d.pachitree", b->size - 2, b->komi, b->handicap);
	} else {
		sprintf(buf, "ucttbook-%d-%02.01f.pachitree", b->size - 2, b->komi);
	}
	return buf;
}

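/* For example, on 19x19 with komi 7.5 this produces "ucttbook-19-7.5.pachitree",
 * and with a 4-stone handicap and komi 0.5 "ucttbook-19-0.5-h4.pachitree"
 * (b->size includes the board border, hence the "- 2"). */
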
static void
tree_node_save(FILE *f, struct tree_node *node, int thres)
{
	bool save_children = node->u.playouts >= thres;

	if (!save_children)
		node->is_expanded = 0;

	fputc(1, f);
	fwrite(((void *) node) + offsetof(struct tree_node, depth),
	       sizeof(struct tree_node) - offsetof(struct tree_node, depth),
	       1, f);

	if (save_children) {
		for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
			tree_node_save(f, ni, thres);
	} else {
		if (node->children)
			node->is_expanded = 1;
	}

	fputc(0, f);
}

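/* The resulting on-disk format is a simple preorder dump: a 1 byte announces
 * a node record (everything from .depth onwards, written verbatim), followed
 * recursively by the records of its saved children, and a 0 byte terminates
 * each child list. tree_node_load() below mirrors this exactly, which also
 * means the file is only portable between builds with an identical
 * struct tree_node layout. */
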
void
tree_save(struct tree *tree, struct board *b, int thres)
{
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "wb");
	if (!f) {
		perror("fopen");
		return;
	}
	tree_node_save(f, tree->root, thres);
	fputc(0, f);
	fclose(f);
}

void
tree_node_load(FILE *f, struct tree_node *node, int *num)
{
	(*num)++;

	fread(((void *) node) + offsetof(struct tree_node, depth),
	      sizeof(struct tree_node) - offsetof(struct tree_node, depth),
	      1, f);

	/* Keep values in sane scale, otherwise we start overflowing. */
#define MAX_PLAYOUTS	10000000
	if (node->u.playouts > MAX_PLAYOUTS) {
		node->u.playouts = MAX_PLAYOUTS;
	}
	if (node->amaf.playouts > MAX_PLAYOUTS) {
		node->amaf.playouts = MAX_PLAYOUTS;
	}
	memcpy(&node->pu, &node->u, sizeof(node->u));

	struct tree_node *ni = NULL, *ni_prev = NULL;
	while (fgetc(f)) {
		ni_prev = ni; ni = calloc2(1, sizeof(*ni));
		if (!node->children)
			node->children = ni;
		else
			ni_prev->sibling = ni;
		ni->parent = node;
		tree_node_load(f, ni, num);
	}
}

void
tree_load(struct tree *tree, struct board *b)
{
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "rb");
	if (!f)
		return;

	fprintf(stderr, "Loading opening tbook %s...\n", filename);

	int num = 0;
	if (fgetc(f))
		tree_node_load(f, tree->root, &num);
	fprintf(stderr, "Loaded %d nodes.\n", num);

	fclose(f);
}

/* Copy the subtree rooted at node: all nodes at or below depth
 * or with at least threshold playouts. Only for fast_alloc.
 * The code is destructive on src. The relative order of children of
 * a given node is preserved (assumed by tree_get_node in particular).
 * Returns the copy of node in the destination tree, or NULL
 * if we could not copy it. */
static struct tree_node *
tree_prune(struct tree *dest, struct tree *src, struct tree_node *node,
	   int threshold, int depth)
{
	assert(dest->nodes && node);
	struct tree_node *n2 = tree_alloc_node(dest, 1, true, NULL);
	if (!n2)
		return NULL;
	*n2 = *node;
	if (n2->depth > dest->max_depth)
		dest->max_depth = n2->depth;
	n2->children = NULL;
	n2->is_expanded = false;

	if (node->depth >= depth && node->u.playouts < threshold)
		return n2;
	/* For deep nodes with many playouts, we must copy all children,
	 * even those with zero playouts, because partially expanded
	 * nodes are not supported. Considering them as fully expanded
	 * would degrade the playing strength. The only exception is
	 * when dest becomes full, but this should never happen in practice
	 * if threshold is chosen to limit the number of nodes traversed. */
	struct tree_node *ni = node->children;
	if (!ni)
		return n2;
	struct tree_node **prev2 = &(n2->children);
	while (ni) {
		struct tree_node *ni2 = tree_prune(dest, src, ni, threshold, depth);
		if (!ni2) break;
		*prev2 = ni2;
		prev2 = &(ni2->sibling);
		ni2->parent = n2;
		ni = ni->sibling;
	}
	if (!ni) {
		n2->is_expanded = true;
	} else {
		n2->children = NULL; // avoid partially expanded nodes
	}
	return n2;
}

/* The following constants are used for garbage collection of nodes.
 * A tree is considered large if the top node has >= 40K playouts.
 * For such trees, we copy deep nodes only if they have enough
 * playouts, with a gradually increasing threshold up to 40.
 * These constants define how much time we're willing to spend
 * scanning the source tree when promoting a move. The chosen values
 * make worst-case pruning take about 3 s with 20 GB of RAM, and that
 * only happens for long thinking times (>1M playouts). For fast games
 * the trees don't grow large. For small RAM or fast games we copy the
 * entire tree. These values do not degrade playing strength and are
 * necessary to avoid losing on time; increasing DEEP_PLAYOUTS_THRESHOLD
 * or decreasing LARGE_TREE_PLAYOUTS would make the program faster but
 * play worse. */
#define LARGE_TREE_PLAYOUTS 40000LL
#define DEEP_PLAYOUTS_THRESHOLD 40

/* Garbage collect the tree early if the top node has < 5K playouts,
 * to avoid having to do it later on a large subtree.
 * This guarantees garbage collection in < 1s. */
#define SMALL_TREE_PLAYOUTS 5000

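/* The playout threshold used by tree_garbage_collect() below scales linearly
 * with the size of the tree and is clamped to [0, DEEP_PLAYOUTS_THRESHOLD]:
 *
 *	threshold = (playouts - LARGE_TREE_PLAYOUTS) * DEEP_PLAYOUTS_THRESHOLD / LARGE_TREE_PLAYOUTS
 *
 * e.g. a root with 30,000 playouts gives threshold 0 (copy everything),
 * 50,000 playouts give (50000 - 40000) * 40 / 40000 = 10, and anything from
 * 80,000 playouts up is clamped to 40. */
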
/* Free the whole tree, keeping only the subtree rooted at node.
 * Prune the subtree if necessary to fit in memory or
 * to save time scanning the tree.
 * Returns the moved node. Only for fast_alloc. */
struct tree_node *
tree_garbage_collect(struct tree *tree, struct tree_node *node)
{
	assert(tree->nodes && !node->parent && !node->sibling);
	double start_time = time_now();
	unsigned long orig_size = tree->nodes_size;

	struct tree *temp_tree = tree_init(tree->board, tree->root_color,
					   tree->max_pruned_size, 0, 0, tree->ltree_aging, 0);
	temp_tree->nodes_size = 0; // We do not want the dummy pass node
	struct tree_node *temp_node;

	/* Find the maximum depth at which we can copy all nodes. */
	int max_nodes = 1;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		max_nodes++;
	unsigned long nodes_size = max_nodes * sizeof(*node);
	int max_depth = node->depth;
	while (nodes_size < tree->max_pruned_size && max_nodes > 1) {
		max_nodes--;
		nodes_size += max_nodes * nodes_size;
		max_depth++;
	}

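	/* The loop above is only a rough estimate: starting from the number of
	 * direct children, each extra level is assumed to multiply the node
	 * count by a (shrinking) branching factor, and we stop once the
	 * estimate would overflow max_pruned_size. The exact value matters
	 * little, since nodes deeper than max_depth are still copied if they
	 * pass the playout threshold computed below. */
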
	/* Copy all nodes for small trees. For large trees, copy all nodes
	 * with depth <= max_depth, and all nodes with enough playouts.
	 * We avoid going too deep (except for nodes with many playouts) mostly
	 * to save time scanning the source tree: completely traversing a large
	 * source tree (20 GB) can take over 20 s even without copying, because
	 * the traversal is very unfriendly to the memory cache. */
	int threshold = (node->u.playouts - LARGE_TREE_PLAYOUTS) * DEEP_PLAYOUTS_THRESHOLD / LARGE_TREE_PLAYOUTS;
	if (threshold < 0) threshold = 0;
	if (threshold > DEEP_PLAYOUTS_THRESHOLD) threshold = DEEP_PLAYOUTS_THRESHOLD;
	temp_node = tree_prune(temp_tree, tree, node, threshold, max_depth);
	assert(temp_node);

	/* Now copy back to original tree. */
	tree->nodes_size = 0;
	tree->max_depth = 0;
	struct tree_node *new_node = tree_prune(tree, temp_tree, temp_node, 0, temp_tree->max_depth);

	if (DEBUGL(1)) {
		double now = time_now();
		static double prev_time;
		if (!prev_time) prev_time = start_time;
		fprintf(stderr,
			"tree pruned in %0.6g s, prev %0.3g s ago, dest depth %d wanted %d,"
			" size %lu->%lu/%lu, playouts %d\n",
			now - start_time, start_time - prev_time, temp_tree->max_depth, max_depth,
			orig_size, temp_tree->nodes_size, tree->max_pruned_size, new_node->u.playouts);
		prev_time = start_time;
	}
	if (temp_tree->nodes_size >= temp_tree->max_tree_size) {
		fprintf(stderr, "temp tree overflow, max_tree_size %lu, pruning_threshold %lu\n",
			tree->max_tree_size, tree->pruning_threshold);
		/* This is not a serious problem, we will simply recompute the discarded nodes
		 * at the next move if necessary. This is better than frequently wasting memory. */
	} else {
		assert(tree->nodes_size == temp_tree->nodes_size);
		assert(tree->max_depth == temp_tree->max_depth);
	}
	tree_done(temp_tree);
	return new_node;
}

/* Get a node of given coordinate from within parent, possibly creating it
 * if necessary - in a very raw form (no .d, priors, ...). */
/* FIXME: Adjust for board symmetry. */
struct tree_node *
tree_get_node(struct tree *t, struct tree_node *parent, coord_t c, bool create)
{
	if (!parent->children || parent->children->coord >= c) {
		/* Special case: Insertion at the beginning. */
		if (parent->children && parent->children->coord == c)
			return parent->children;
		if (!create)
			return NULL;

		struct tree_node *nn = tree_init_node(t, c, parent->depth + 1, false);
		nn->parent = parent; nn->sibling = parent->children;
		parent->children = nn;
		return nn;
	}

	/* No candidate at the beginning, look through all the children. */

	struct tree_node *ni;
	for (ni = parent->children; ni->sibling; ni = ni->sibling)
		if (ni->sibling->coord >= c)
			break;

	if (ni->sibling && ni->sibling->coord == c)
		return ni->sibling;
	assert(ni->coord < c);
	if (!create)
		return NULL;

	struct tree_node *nn = tree_init_node(t, c, parent->depth + 1, false);
	nn->parent = parent; nn->sibling = ni->sibling; ni->sibling = nn;
	return nn;
}

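/* Note that this relies on each child list being kept sorted by coord:
 * lookups can stop at the first sibling with coord >= c, and new nodes are
 * spliced in so that the order is preserved (tree_prune() above keeps the
 * relative order of children for the same reason). */
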
/* Get local tree node corresponding to given node, given local node child
 * iterator @lni (which points either at the corresponding node, or at the
 * nearest local tree node after @ni). */
struct tree_node *
tree_lnode_for_node(struct tree *tree, struct tree_node *ni, struct tree_node *lni, int tenuki_d)
{
	/* Now set up lnode, which is the actual local node
	 * corresponding to ni - either lni if it is an
	 * exact match and ni is not tenuki, the <pass> local
	 * node if ni is tenuki, or NULL if there is no
	 * corresponding node available. */

	if (is_pass(ni->coord)) {
		/* Also, for sanity reasons we never use the local
		 * tree for passes. (Maybe we could, but it's
		 * too hard to think about.) */
		return NULL;
	}

	if (lni->coord == ni->coord) {
		/* We don't consider tenuki a sequence play
		 * that we have in the local tree even though
		 * ni->d is too high; this can happen if it
		 * occurred in a different board topology. */
		return lni;
	}

	if (ni->d >= tenuki_d) {
		/* Tenuki, pick a pass lsibling if available. */
		assert(lni->parent && lni->parent->children);
		if (is_pass(lni->parent->children->coord)) {
			return lni->parent->children;
		} else {
			return NULL;
		}
	}

	/* No corresponding local node, lnode stays NULL. */
	return NULL;
}

/* Tree symmetry: When possible, we will localize the tree to a single part
 * of the board in tree_expand_node() and possibly flip along symmetry axes
 * to another part of the board in tree_promote_at(). We follow b->symmetry
 * guidelines here. */

/* This function must be thread safe, given that board b is only modified by the calling thread. */
void
tree_expand_node(struct tree *t, struct tree_node *node, struct board *b, enum stone color, struct uct *u, int parity)
{
	/* Get a Common Fate Graph distance map from parent node. */
	int distances[board_size2(b)];
	if (!is_pass(b->last_move.coord) && !is_resign(b->last_move.coord)) {
		cfg_distances(b, node->coord, distances, TREE_NODE_D_MAX);
	} else {
		// Pass or resign - everything is too far.
		foreach_point(b) { distances[c] = TREE_NODE_D_MAX + 1; } foreach_point_end;
	}

	/* Get a map of prior values to initialize the new nodes with. */
	struct prior_map map = {
		.b = b,
		.to_play = color,
		.parity = tree_parity(t, parity),
		.distances = distances,
	};
	// Include pass in the prior map.
	struct move_stats map_prior[board_size2(b) + 1]; map.prior = &map_prior[1];
	bool map_consider[board_size2(b) + 1]; map.consider = &map_consider[1];
	memset(map_prior, 0, sizeof(map_prior));
	memset(map_consider, 0, sizeof(map_consider));
	map.consider[pass] = true;
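	/* The [1] offsets above leave one extra slot in front of the arrays so
	 * that indexing with the pass coordinate (which is negative, -1, in
	 * this board representation) stays in bounds: map.prior[pass] resolves
	 * to map_prior[0]. */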
	foreach_free_point(b) {
		assert(board_at(b, c) == S_NONE);
		if (!board_is_valid_play(b, color, c))
			continue;
		map.consider[c] = true;
	} foreach_free_point_end;
	uct_prior(u, node, &map);

	/* Now, create the nodes. */
	struct tree_node *ni = tree_init_node(t, pass, node->depth + 1, t->nodes);
	/* In fast_alloc mode we might temporarily run out of nodes but this should be rare. */
	if (!ni) {
		node->is_expanded = false;
		return;
	}
	struct tree_node *first_child = ni;
	ni->parent = node;
	ni->prior = map.prior[pass]; ni->d = TREE_NODE_D_MAX + 1;

	/* The loop considers only the symmetry playground. */
	if (UDEBUGL(6)) {
		fprintf(stderr, "expanding %s within [%d,%d],[%d,%d] %d-%d\n",
			coord2sstr(node->coord, b),
			b->symmetry.x1, b->symmetry.y1,
			b->symmetry.x2, b->symmetry.y2,
			b->symmetry.type, b->symmetry.d);
	}
	for (int j = b->symmetry.y1; j <= b->symmetry.y2; j++) {
		for (int i = b->symmetry.x1; i <= b->symmetry.x2; i++) {
			if (b->symmetry.d) {
				int x = b->symmetry.type == SYM_DIAG_DOWN ? board_size(b) - 1 - i : i;
				if (x > j) {
					if (UDEBUGL(7))
						fprintf(stderr, "drop %d,%d\n", i, j);
					continue;
				}
			}

			coord_t c = coord_xy(t->board, i, j);
			if (!map.consider[c]) // Filter out invalid moves
				continue;
			assert(c != node->coord); // I have spotted "C3 C3" in some sequence...

			struct tree_node *nj = tree_init_node(t, c, node->depth + 1, t->nodes);
			if (!nj) {
				node->is_expanded = false;
				return;
			}
			nj->parent = node; ni->sibling = nj; ni = nj;

			ni->prior = map.prior[c];
			ni->d = distances[c];
		}
	}
	node->children = first_child; // must be done at the end to avoid race
}

static coord_t
flip_coord(struct board *b, coord_t c,
	   bool flip_horiz, bool flip_vert, int flip_diag)
{
	int x = coord_x(c, b), y = coord_y(c, b);
	if (flip_diag) {
		int z = x; x = y; y = z;
	}
	if (flip_horiz) {
		x = board_size(b) - 1 - x;
	}
	if (flip_vert) {
		y = board_size(b) - 1 - y;
	}
	return coord_xy(b, x, y);
}

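/* The transposition (flip_diag) is applied first, then the horizontal and
 * vertical mirrors; e.g. with flip_horiz alone a point at x maps to
 * board_size(b) - 1 - x with y unchanged. */
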
static void
tree_fix_node_symmetry(struct board *b, struct tree_node *node,
		       bool flip_horiz, bool flip_vert, int flip_diag)
{
	if (!is_pass(node->coord))
		node->coord = flip_coord(b, node->coord, flip_horiz, flip_vert, flip_diag);

	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		tree_fix_node_symmetry(b, ni, flip_horiz, flip_vert, flip_diag);
}

static void
tree_fix_symmetry(struct tree *tree, struct board *b, coord_t c)
{
	if (is_pass(c))
		return;

	struct board_symmetry *s = &tree->root_symmetry;
	int cx = coord_x(c, b), cy = coord_y(c, b);

	/* playground	X->h->v->d normalization
	 * :::..	.d...
	 * .::..	v....
	 * ..:..	.....
	 * .....	h...X
	 * .....	..... */
	bool flip_horiz = cx < s->x1 || cx > s->x2;
	bool flip_vert = cy < s->y1 || cy > s->y2;

	bool flip_diag = 0;
	if (s->d) {
		bool dir = (s->type == SYM_DIAG_DOWN);
		int x = dir ^ flip_horiz ^ flip_vert ? board_size(b) - 1 - cx : cx;
		if (flip_vert ? x < cy : x > cy) {
			flip_diag = 1;
		}
	}

	if (DEBUGL(4)) {
		fprintf(stderr, "%s [%d,%d -> %d,%d;%d,%d] will flip %d %d %d -> %s, sym %d (%d) -> %d (%d)\n",
			coord2sstr(c, b),
			cx, cy, s->x1, s->y1, s->x2, s->y2,
			flip_horiz, flip_vert, flip_diag,
			coord2sstr(flip_coord(b, c, flip_horiz, flip_vert, flip_diag), b),
			s->type, s->d, b->symmetry.type, b->symmetry.d);
	}
	if (flip_horiz || flip_vert || flip_diag)
		tree_fix_node_symmetry(b, tree->root, flip_horiz, flip_vert, flip_diag);
}

static void
tree_unlink_node(struct tree_node *node)
{
	struct tree_node *ni = node->parent;
	if (ni->children == node) {
		ni->children = node->sibling;
	} else {
		ni = ni->children;
		while (ni->sibling != node)
			ni = ni->sibling;
		ni->sibling = node->sibling;
	}
	node->sibling = NULL;
	node->parent = NULL;
}

/* Reduce weight of statistics on promotion. Remove nodes that
 * get reduced to zero playouts; returns next node to consider
 * in the children list (@node may get deleted). */
static struct tree_node *
tree_age_node(struct tree *tree, struct tree_node *node)
{
	node->u.playouts /= tree->ltree_aging;
	if (node->parent && !node->u.playouts) {
		struct tree_node *sibling = node->sibling;
		/* Delete node, no playouts. */
		tree_unlink_node(node);
		tree_done_node(tree, node);
		return sibling;
	}

	struct tree_node *ni = node->children;
	while (ni) ni = tree_age_node(tree, ni);
	return node->sibling;
}

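/* Since u.playouts is an integer, dividing by ltree_aging truncates, so with
 * aging > 1 rarely visited local-tree nodes eventually hit zero playouts and
 * are removed by the branch above. */
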
/* Promote the given node to be the root of the tree. In fast_alloc
 * mode, the node may be moved and some of its subtree may be pruned. */
void
tree_promote_node(struct tree *tree, struct tree_node **node)
{
	assert((*node)->parent == tree->root);
	tree_unlink_node(*node);
	if (!tree->nodes) {
		/* Freeing the rest of the tree can take several seconds on large
		 * trees, so we must do it asynchronously: */
		tree_done_node_detached(tree, tree->root);
	} else {
		/* Garbage collect if we run out of memory, or it is cheap to do so now: */
		if (tree->nodes_size >= tree->pruning_threshold
		    || (tree->nodes_size >= tree->max_tree_size / 10 && (*node)->u.playouts < SMALL_TREE_PLAYOUTS))
			*node = tree_garbage_collect(tree, *node);
	}
	tree->root = *node;
	tree->root_color = stone_other(tree->root_color);

	board_symmetry_update(tree->board, &tree->root_symmetry, (*node)->coord);
	/* See the tree.score description for an explanation of why we don't
	 * zero the score on node promotion. */
	// tree->score.playouts = 0;

	/* If the tree's deepest node was under node, or if we called tree_garbage_collect,
	 * tree->max_depth is correct. Otherwise we could traverse the tree
	 * to recompute max_depth but it's not worth it: it's just for debugging
	 * and soon the tree will grow and max_depth will become correct again. */

	if (tree->ltree_aging != 1.0f) { // XXX: != should work here even with the floating_t
		tree_age_node(tree, tree->ltree_black);
		tree_age_node(tree, tree->ltree_white);
	}
}

bool
tree_promote_at(struct tree *tree, struct board *b, coord_t c)
{
	tree_fix_symmetry(tree, b, c);

	for (struct tree_node *ni = tree->root->children; ni; ni = ni->sibling) {
		if (ni->coord == c) {
			tree_promote_node(tree, &ni);
			return true;
		}
	}
	return false;
}