/* pachi: uct/tree.c */
#include <assert.h>
#include <math.h>
#include <pthread.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define DEBUG
#include "board.h"
#include "debug.h"
#include "engine.h"
#include "move.h"
#include "playout.h"
#include "tactics/util.h"
#include "timeinfo.h"
#include "uct/internal.h"
#include "uct/prior.h"
#include "uct/tree.h"
#include "uct/slave.h"

/* Allocate tree node(s). The returned nodes are initialized with zeroes.
 * Returns NULL if not enough memory.
 * This function may be called by multiple threads in parallel. */
static struct tree_node *
tree_alloc_node(struct tree *t, int count, bool fast_alloc)
{
	struct tree_node *n = NULL;
	size_t nsize = count * sizeof(*n);
	unsigned long old_size = __sync_fetch_and_add(&t->nodes_size, nsize);

	if (fast_alloc) {
		if (old_size + nsize > t->max_tree_size)
			return NULL;
		assert(t->nodes != NULL);
		n = (struct tree_node *)(t->nodes + old_size);
		memset(n, 0, nsize);
	} else {
		n = calloc2(count, sizeof(*n));
	}
	return n;
}

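/* In fast_alloc mode the atomic fetch-and-add above makes t->nodes a simple
 * lock-free bump allocator: each caller reserves its own slice of the
 * preallocated buffer, so no mutex is needed and nodes are never freed
 * individually - the whole buffer is recycled at once by the garbage
 * collection code below. */
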
/* Initialize a node at a given place in memory.
 * This function may be called by multiple threads in parallel. */
static void
tree_setup_node(struct tree *t, struct tree_node *n, coord_t coord, int depth)
{
	static volatile unsigned int hash = 0;
	n->coord = coord;
	n->depth = depth;
	/* n->hash is used only for debugging. It is very likely (but not
	 * guaranteed) to be unique. */
	hash_t h = n - (struct tree_node *)0;
	n->hash = (h << 32) + (hash++ & 0xffffffff);
	if (depth > t->max_depth)
		t->max_depth = depth;
}

/* Allocate and initialize a node. Returns NULL (fast_alloc mode)
 * or exits the main program if not enough memory.
 * This function may be called by multiple threads in parallel. */
static struct tree_node *
tree_init_node(struct tree *t, coord_t coord, int depth, bool fast_alloc)
{
	struct tree_node *n;
	n = tree_alloc_node(t, 1, fast_alloc);
	if (!n) return NULL;
	tree_setup_node(t, n, coord, depth);
	return n;
}

/* Create a tree structure. Pre-allocate all nodes if max_tree_size is > 0. */
struct tree *
tree_init(struct board *board, enum stone color, unsigned long max_tree_size,
	  unsigned long max_pruned_size, unsigned long pruning_threshold, floating_t ltree_aging, int hbits)
{
	struct tree *t = calloc2(1, sizeof(*t));
	t->board = board;
	t->max_tree_size = max_tree_size;
	t->max_pruned_size = max_pruned_size;
	t->pruning_threshold = pruning_threshold;
	if (max_tree_size != 0) {
		t->nodes = malloc2(max_tree_size);
		/* The nodes buffer doesn't need initialization. This is currently
		 * done by tree_init_node to spread the load. Doing a memset for the
		 * entire buffer here would be too slow for large trees (>10 GB). */
	}
	/* The root PASS move is only virtual, we never play it. */
	t->root = tree_init_node(t, pass, 0, t->nodes);
	t->root_symmetry = board->symmetry;
	t->root_color = stone_other(color); // to research black moves, root will be white

	t->ltree_black = tree_init_node(t, pass, 0, false);
	t->ltree_white = tree_init_node(t, pass, 0, false);
	t->ltree_aging = ltree_aging;

	t->hbits = hbits;
	if (hbits) t->htable = uct_htable_alloc(hbits);
	return t;
}

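/* Rough usage sketch of the lifecycle above (illustrative only - the real
 * call sites live elsewhere in the uct engine, and the sizes below are
 * made up):
 *
 *	struct tree *t = tree_init(b, S_BLACK,
 *				   200 * 1048576,	// max_tree_size: 200 MB node arena
 *				   100 * 1048576,	// max_pruned_size
 *				   150 * 1048576,	// pruning_threshold
 *				   1.0f,		// ltree_aging: no aging
 *				   0);			// hbits: no hash table
 *	// ... descend, expand and update the tree during the search ...
 *	tree_done(t);
 */
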
/* This function may be called by multiple threads in parallel on the
 * same tree, but not on node n. n may be detached from the tree but
 * must have been created in this tree originally.
 * It returns the remaining size of the tree after n has been freed. */
static unsigned long
tree_done_node(struct tree *t, struct tree_node *n)
{
	struct tree_node *ni = n->children;
	while (ni) {
		struct tree_node *nj = ni->sibling;
		tree_done_node(t, ni);
		ni = nj;
	}
	free(n);
	unsigned long old_size = __sync_fetch_and_sub(&t->nodes_size, sizeof(*n));
	return old_size - sizeof(*n);
}

struct subtree_ctx {
	struct tree *t;
	struct tree_node *n;
};

/* Worker thread for tree_done_node_detached(). Only for fast_alloc=false. */
static void *
tree_done_node_worker(void *ctx_)
{
	struct subtree_ctx *ctx = ctx_;
	char *str = coord2str(node_coord(ctx->n), ctx->t->board);

	unsigned long tree_size = tree_done_node(ctx->t, ctx->n);
	if (!tree_size)
		free(ctx->t);
	if (DEBUGL(2))
		fprintf(stderr, "done freeing node at %s, tree size %lu\n", str, tree_size);
	free(str);
	free(ctx);
	return NULL;
}

/* Asynchronously free the subtree of nodes rooted at n. If the tree becomes
 * empty, free the tree also. Only for fast_alloc=false. */
static void
tree_done_node_detached(struct tree *t, struct tree_node *n)
{
	if (n->u.playouts < 1000) { // no thread for small tree
		if (!tree_done_node(t, n))
			free(t);
		return;
	}
	pthread_attr_t attr;
	pthread_attr_init(&attr);
	pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

	pthread_t thread;
	struct subtree_ctx *ctx = malloc2(sizeof(struct subtree_ctx));
	ctx->t = t;
	ctx->n = n;
	pthread_create(&thread, &attr, tree_done_node_worker, ctx);
	pthread_attr_destroy(&attr);
}

void
tree_done(struct tree *t)
{
	tree_done_node(t, t->ltree_black);
	tree_done_node(t, t->ltree_white);

	if (t->htable) free(t->htable);
	if (t->nodes) {
		free(t->nodes);
		free(t);
	} else if (!tree_done_node(t, t->root)) {
		free(t);
		/* A tree_done_node_worker might still be running on this tree but
		 * it will free the tree later. It is also freeing nodes faster than
		 * we will create new ones. */
	}
}

static void
tree_node_dump(struct tree *tree, struct tree_node *node, int treeparity, int l, int thres)
{
	for (int i = 0; i < l; i++) fputc(' ', stderr);
	int children = 0;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		children++;
	/* We use 1 as parity, since for all nodes we want to know the
	 * win probability of _us_, not the node color. */
	fprintf(stderr, "[%s] %.3f/%d [prior %.3f/%d amaf %.3f/%d crit %.3f] h=%x c#=%d <%"PRIhash">\n",
		coord2sstr(node_coord(node), tree->board),
		tree_node_get_value(tree, treeparity, node->u.value), node->u.playouts,
		tree_node_get_value(tree, treeparity, node->prior.value), node->prior.playouts,
		tree_node_get_value(tree, treeparity, node->amaf.value), node->amaf.playouts,
		tree_node_criticality(tree, node),
		node->hints, children, node->hash);

	/* Print nodes sorted by #playouts. */

	struct tree_node *nbox[1000]; int nboxl = 0;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		if (ni->u.playouts > thres)
			nbox[nboxl++] = ni;

	while (true) {
		int best = -1;
		for (int i = 0; i < nboxl; i++)
			if (nbox[i] && (best < 0 || nbox[i]->u.playouts > nbox[best]->u.playouts))
				best = i;
		if (best < 0)
			break;
		tree_node_dump(tree, nbox[best], treeparity, l + 1, /* node->u.value < 0.1 ? 0 : */ thres);
		nbox[best] = NULL;
	}
}

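/* The dump above repeatedly picks the remaining child with the most playouts
 * (a simple selection sort over at most 1000 entries); the fixed nbox[]
 * buffer is comfortably larger than the 361 board points plus pass that a
 * 19x19 node can have as children. */
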
void
tree_dump(struct tree *tree, int thres)
{
	if (thres && tree->root->u.playouts / thres > 100) {
		/* Be a bit sensible about this; the opening tbook can create
		 * huge dumps at first. */
		thres = tree->root->u.playouts / 100 * (thres < 1000 ? 1 : thres / 1000);
	}
	fprintf(stderr, "(UCT tree; root %s; extra komi %f; max depth %d)\n",
		stone2str(tree->root_color), tree->extra_komi,
		tree->max_depth - tree->root->depth);
	tree_node_dump(tree, tree->root, 1, 0, thres);

	if (DEBUGL(3) && tree->ltree_black) {
		fprintf(stderr, "B local tree:\n");
		tree_node_dump(tree, tree->ltree_black, tree->root_color == S_WHITE ? 1 : -1, 0, thres);
		fprintf(stderr, "W local tree:\n");
		tree_node_dump(tree, tree->ltree_white, tree->root_color == S_BLACK ? 1 : -1, 0, thres);
	}
}

static char *
tree_book_name(struct board *b)
{
	static char buf[256];
	if (b->handicap > 0) {
		sprintf(buf, "ucttbook-%d-%02.01f-h%d.pachitree", b->size - 2, b->komi, b->handicap);
	} else {
		sprintf(buf, "ucttbook-%d-%02.01f.pachitree", b->size - 2, b->komi);
	}
	return buf;
}

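/* For example, a plain 19x19 game with komi 7.5 and no handicap maps to
 * something like "ucttbook-19-7.5.pachitree" (b->size includes the board
 * border, hence the "- 2"). */
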
static void
tree_node_save(FILE *f, struct tree_node *node, int thres)
{
	bool save_children = node->u.playouts >= thres;

	if (!save_children)
		node->is_expanded = 0;

	fputc(1, f);
	fwrite(((void *) node) + offsetof(struct tree_node, depth),
	       sizeof(struct tree_node) - offsetof(struct tree_node, depth),
	       1, f);

	if (save_children) {
		for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
			tree_node_save(f, ni, thres);
	} else {
		if (node->children)
			node->is_expanded = 1;
	}

	fputc(0, f);
}

void
tree_save(struct tree *tree, struct board *b, int thres)
{
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "wb");
	if (!f) {
		perror("fopen");
		return;
	}
	tree_node_save(f, tree->root, thres);
	fputc(0, f);
	fclose(f);
}

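/* The resulting tbook file is a simple preorder dump: each stored node is
 * announced by a 1 byte and followed by its struct contents from the .depth
 * field onwards, its (possibly empty) child list is terminated by a 0 byte,
 * and a final 0 byte ends the file; tree_node_load() below mirrors this
 * layout exactly. */
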
void
tree_node_load(FILE *f, struct tree_node *node, int *num)
{
	(*num)++;

	fread(((void *) node) + offsetof(struct tree_node, depth),
	      sizeof(struct tree_node) - offsetof(struct tree_node, depth),
	      1, f);

	/* Keep values in sane scale, otherwise we start overflowing. */
#define MAX_PLAYOUTS	10000000
	if (node->u.playouts > MAX_PLAYOUTS) {
		node->u.playouts = MAX_PLAYOUTS;
	}
	if (node->amaf.playouts > MAX_PLAYOUTS) {
		node->amaf.playouts = MAX_PLAYOUTS;
	}
	memcpy(&node->pu, &node->u, sizeof(node->u));

	struct tree_node *ni = NULL, *ni_prev = NULL;
	while (fgetc(f)) {
		ni_prev = ni; ni = calloc2(1, sizeof(*ni));
		if (!node->children)
			node->children = ni;
		else
			ni_prev->sibling = ni;
		ni->parent = node;
		tree_node_load(f, ni, num);
	}
}

void
tree_load(struct tree *tree, struct board *b)
{
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "rb");
	if (!f)
		return;

	fprintf(stderr, "Loading opening tbook %s...\n", filename);

	int num = 0;
	if (fgetc(f))
		tree_node_load(f, tree->root, &num);
	fprintf(stderr, "Loaded %d nodes.\n", num);

	fclose(f);
}

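/* Note that tree_node_load() above does not check the fread() return value,
 * so a truncated tbook file is loaded silently with whatever data was read. */
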
/* Copy the subtree rooted at node: all nodes at or below depth
 * or with at least threshold playouts. Only for fast_alloc.
 * The code is destructive on src. The relative order of children of
 * a given node is preserved (assumed by tree_get_node in particular).
 * Returns the copy of node in the destination tree, or NULL
 * if we could not copy it. */
static struct tree_node *
tree_prune(struct tree *dest, struct tree *src, struct tree_node *node,
	   int threshold, int depth)
{
	assert(dest->nodes && node);
	struct tree_node *n2 = tree_alloc_node(dest, 1, true);
	if (!n2)
		return NULL;
	*n2 = *node;
	if (n2->depth > dest->max_depth)
		dest->max_depth = n2->depth;
	n2->children = NULL;
	n2->is_expanded = false;

	if (node->depth >= depth && node->u.playouts < threshold)
		return n2;
	/* For deep nodes with many playouts, we must copy all children,
	 * even those with zero playouts, because partially expanded
	 * nodes are not supported. Considering them as fully expanded
	 * would degrade the playing strength. The only exception is
	 * when dest becomes full, but this should never happen in practice
	 * if threshold is chosen to limit the number of nodes traversed. */
	struct tree_node *ni = node->children;
	if (!ni)
		return n2;
	struct tree_node **prev2 = &(n2->children);
	while (ni) {
		struct tree_node *ni2 = tree_prune(dest, src, ni, threshold, depth);
		if (!ni2) break;
		*prev2 = ni2;
		prev2 = &(ni2->sibling);
		ni2->parent = n2;
		ni = ni->sibling;
	}
	if (!ni) {
		n2->is_expanded = true;
	} else {
		n2->children = NULL; // avoid partially expanded nodes
	}
	return n2;
}

/* The following constants are used for garbage collection of nodes.
 * A tree is considered large if the top node has >= 40K playouts.
 * For such trees, we copy deep nodes only if they have enough
 * playouts, with a gradually increasing threshold up to 40.
 * These constants define how much time we're willing to spend
 * scanning the source tree when promoting a move. The chosen values
 * keep worst-case pruning to about 3 s for 20 GB of RAM, and this
 * only matters for long thinking times (>1M playouts). For fast games the
 * trees don't grow large. For small RAM or fast games we copy the
 * entire tree. These values do not degrade playing strength and are
 * necessary to avoid losing on time; increasing DEEP_PLAYOUTS_THRESHOLD
 * or decreasing LARGE_TREE_PLAYOUTS will make the program faster but
 * play worse. */
#define LARGE_TREE_PLAYOUTS 40000LL
#define DEEP_PLAYOUTS_THRESHOLD 40

/* Garbage collect the tree early if the top node has < 5K playouts,
 * to avoid having to do it later on a large subtree.
 * This guarantees garbage collection in < 1s. */
#define SMALL_TREE_PLAYOUTS 5000

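/* Worked example of the deep-node threshold computed in tree_garbage_collect()
 * below: with 60K playouts at the root,
 *	threshold = (60000 - 40000) * 40 / 40000 = 20,
 * so nodes beyond the depth limit keep their children only if they have at
 * least 20 playouts; at or below 40K root playouts the threshold clamps to 0
 * (copy everything), and from 80K upwards it clamps to
 * DEEP_PLAYOUTS_THRESHOLD = 40. */
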
/* Free the whole tree, keeping only the subtree rooted at node.
 * Prune the subtree if necessary to fit in memory or
 * to save time scanning the tree.
 * Returns the moved node. Only for fast_alloc. */
struct tree_node *
tree_garbage_collect(struct tree *tree, struct tree_node *node)
{
	assert(tree->nodes && !node->parent && !node->sibling);
	double start_time = time_now();
	unsigned long orig_size = tree->nodes_size;

	struct tree *temp_tree = tree_init(tree->board, tree->root_color,
					   tree->max_pruned_size, 0, 0, tree->ltree_aging, 0);
	temp_tree->nodes_size = 0; // We do not want the dummy pass node
	struct tree_node *temp_node;

	/* Find the maximum depth at which we can copy all nodes. */
	int max_nodes = 1;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		max_nodes++;
	unsigned long nodes_size = max_nodes * sizeof(*node);
	int max_depth = node->depth;
	while (nodes_size < tree->max_pruned_size && max_nodes > 1) {
		max_nodes--;
		nodes_size += max_nodes * nodes_size;
		max_depth++;
	}

	/* Copy all nodes for small trees. For large trees, copy all nodes
	 * with depth <= max_depth, and all nodes with enough playouts.
	 * Avoiding going too deep (except for nodes with many playouts) is mostly
	 * to save time scanning the source tree. It can take over 20 s to completely
	 * traverse a large source tree (20 GB) even without copying, because
	 * the traversal is not at all friendly to the memory cache. */
	int threshold = (node->u.playouts - LARGE_TREE_PLAYOUTS) * DEEP_PLAYOUTS_THRESHOLD / LARGE_TREE_PLAYOUTS;
	if (threshold < 0) threshold = 0;
	if (threshold > DEEP_PLAYOUTS_THRESHOLD) threshold = DEEP_PLAYOUTS_THRESHOLD;
	temp_node = tree_prune(temp_tree, tree, node, threshold, max_depth);
	assert(temp_node);

	/* Now copy back to original tree. */
	tree->nodes_size = 0;
	tree->max_depth = 0;
	struct tree_node *new_node = tree_prune(tree, temp_tree, temp_node, 0, temp_tree->max_depth);

	if (DEBUGL(1)) {
		double now = time_now();
		static double prev_time;
		if (!prev_time) prev_time = start_time;
		fprintf(stderr,
			"tree pruned in %0.6g s, prev %0.3g s ago, dest depth %d wanted %d,"
			" size %lu->%lu/%lu, playouts %d\n",
			now - start_time, start_time - prev_time, temp_tree->max_depth, max_depth,
			orig_size, temp_tree->nodes_size, tree->max_pruned_size, new_node->u.playouts);
		prev_time = start_time;
	}
	if (temp_tree->nodes_size >= temp_tree->max_tree_size) {
		fprintf(stderr, "temp tree overflow, max_tree_size %lu, pruning_threshold %lu\n",
			tree->max_tree_size, tree->pruning_threshold);
		/* This is not a serious problem; we will simply recompute the discarded nodes
		 * at the next move if necessary. This is better than frequently wasting memory. */
	} else {
		assert(tree->nodes_size == temp_tree->nodes_size);
		assert(tree->max_depth == temp_tree->max_depth);
	}
	tree_done(temp_tree);
	return new_node;
}

/* Get a node of given coordinate from within parent, possibly creating it
 * if necessary - in a very raw form (no .d, priors, ...). */
/* FIXME: Adjust for board symmetry. */
struct tree_node *
tree_get_node(struct tree *t, struct tree_node *parent, coord_t c, bool create)
{
	if (!parent->children || node_coord(parent->children) >= c) {
		/* Special case: Insertion at the beginning. */
		if (parent->children && node_coord(parent->children) == c)
			return parent->children;
		if (!create)
			return NULL;

		struct tree_node *nn = tree_init_node(t, c, parent->depth + 1, false);
		nn->parent = parent; nn->sibling = parent->children;
		parent->children = nn;
		return nn;
	}

	/* No candidate at the beginning, look through all the children. */

	struct tree_node *ni;
	for (ni = parent->children; ni->sibling; ni = ni->sibling)
		if (node_coord(ni->sibling) >= c)
			break;

	if (ni->sibling && node_coord(ni->sibling) == c)
		return ni->sibling;
	assert(node_coord(ni) < c);
	if (!create)
		return NULL;

	struct tree_node *nn = tree_init_node(t, c, parent->depth + 1, false);
	nn->parent = parent; nn->sibling = ni->sibling; ni->sibling = nn;
	return nn;
}

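/* tree_get_node() assumes - and preserves - that a node's children are kept
 * sorted by ascending coordinate: the lookup stops at the first sibling with
 * node_coord() >= c and a new node is spliced in right there. tree_prune()
 * keeps the relative child order for the same reason. */
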
/* Get local tree node corresponding to given node, given local node child
 * iterator @lni (which points either at the corresponding node, or at the
 * nearest local tree node after @ni). */
struct tree_node *
tree_lnode_for_node(struct tree *tree, struct tree_node *ni, struct tree_node *lni, int tenuki_d)
{
	/* Now set up lnode, which is the actual local node
	 * corresponding to ni - either lni if it is an
	 * exact match and ni is not tenuki, <pass> local
	 * node if ni is tenuki, or NULL if there is no
	 * corresponding node available. */

	if (is_pass(node_coord(ni))) {
		/* Also, for sanity reasons we never use local
		 * tree for passes. (Maybe we could, but it's
		 * too hard to think about.) */
		return NULL;
	}

	if (node_coord(lni) == node_coord(ni)) {
		/* We don't consider tenuki a sequence play
		 * that we have in local tree even though
		 * ni->d is too high; this can happen if this
		 * occurred in different board topology. */
		return lni;
	}

	if (ni->d >= tenuki_d) {
		/* Tenuki, pick a pass lsibling if available. */
		assert(lni->parent && lni->parent->children);
		if (is_pass(node_coord(lni->parent->children))) {
			return lni->parent->children;
		} else {
			return NULL;
		}
	}

	/* No corresponding local node, lnode stays NULL. */
	return NULL;
}

/* Tree symmetry: When possible, we will localize the tree to a single part
 * of the board in tree_expand_node() and possibly flip along symmetry axes
 * to another part of the board in tree_promote_at(). We follow b->symmetry
 * guidelines here. */

/* This function must be thread safe, given that board b is only modified by the calling thread. */
void
tree_expand_node(struct tree *t, struct tree_node *node, struct board *b, enum stone color, struct uct *u, int parity)
{
	/* Get a Common Fate Graph distance map from parent node. */
	int distances[board_size2(b)];
	if (!is_pass(b->last_move.coord) && !is_resign(b->last_move.coord)) {
		cfg_distances(b, node_coord(node), distances, TREE_NODE_D_MAX);
	} else {
		// Pass or resign - everything is too far.
		foreach_point(b) { distances[c] = TREE_NODE_D_MAX + 1; } foreach_point_end;
	}

	/* Get a map of prior values to initialize the new nodes with. */
	struct prior_map map = {
		.b = b,
		.to_play = color,
		.parity = tree_parity(t, parity),
		.distances = distances,
	};
	// Include pass in the prior map.
	struct move_stats map_prior[board_size2(b) + 1]; map.prior = &map_prior[1];
	bool map_consider[board_size2(b) + 1]; map.consider = &map_consider[1];
	memset(map_prior, 0, sizeof(map_prior));
	memset(map_consider, 0, sizeof(map_consider));
	map.consider[pass] = true;
	int child_count = 1; // for pass
	foreach_free_point(b) {
		assert(board_at(b, c) == S_NONE);
		if (!board_is_valid_play(b, color, c))
			continue;
		map.consider[c] = true;
		child_count++;
	} foreach_free_point_end;
	uct_prior(u, node, &map);

	/* Now, create the nodes (all at once if fast_alloc). */
	struct tree_node *ni = t->nodes ? tree_alloc_node(t, child_count, true) : tree_alloc_node(t, 1, false);
	/* In fast_alloc mode we might temporarily run out of nodes but this should be rare. */
	if (!ni) {
		node->is_expanded = false;
		return;
	}
	tree_setup_node(t, ni, pass, node->depth + 1);

	struct tree_node *first_child = ni;
	ni->parent = node;
	ni->prior = map.prior[pass]; ni->d = TREE_NODE_D_MAX + 1;

	/* The loop considers only the symmetry playground. */
	if (UDEBUGL(6)) {
		fprintf(stderr, "expanding %s within [%d,%d],[%d,%d] %d-%d\n",
			coord2sstr(node_coord(node), b),
			b->symmetry.x1, b->symmetry.y1,
			b->symmetry.x2, b->symmetry.y2,
			b->symmetry.type, b->symmetry.d);
	}
	int child = 1;
	for (int j = b->symmetry.y1; j <= b->symmetry.y2; j++) {
		for (int i = b->symmetry.x1; i <= b->symmetry.x2; i++) {
			if (b->symmetry.d) {
				int x = b->symmetry.type == SYM_DIAG_DOWN ? board_size(b) - 1 - i : i;
				if (x > j) {
					if (UDEBUGL(7))
						fprintf(stderr, "drop %d,%d\n", i, j);
					continue;
				}
			}
			coord_t c = coord_xy(t->board, i, j);
			if (!map.consider[c]) // Filter out invalid moves
				continue;
			assert(c != node_coord(node)); // I have spotted "C3 C3" in some sequence...

			struct tree_node *nj = t->nodes ? first_child + child++ : tree_alloc_node(t, 1, false);
			tree_setup_node(t, nj, c, node->depth + 1);
			nj->parent = node; ni->sibling = nj; ni = nj;

			ni->prior = map.prior[c];
			ni->d = distances[c];
		}
	}
	node->children = first_child; // must be done at the end to avoid race
}

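/* Note that child_count above counts every legal move on the whole board,
 * while the loop only instantiates moves inside the symmetry playground; in
 * fast_alloc mode any surplus preallocated slots simply remain unused
 * (zeroed) in the node buffer. */
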
static coord_t
flip_coord(struct board *b, coord_t c,
	   bool flip_horiz, bool flip_vert, int flip_diag)
{
	int x = coord_x(c, b), y = coord_y(c, b);
	if (flip_diag) {
		int z = x; x = y; y = z;
	}
	if (flip_horiz) {
		x = board_size(b) - 1 - x;
	}
	if (flip_vert) {
		y = board_size(b) - 1 - y;
	}
	return coord_xy(b, x, y);
}

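/* Example: on a 19x19 board (board_size() == 21 including the one-point
 * border), a horizontal flip maps x = 3 to 21 - 1 - 3 = 17, i.e. two points
 * from the left edge becomes two points from the right edge. */
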
static void
tree_fix_node_symmetry(struct board *b, struct tree_node *node,
		       bool flip_horiz, bool flip_vert, int flip_diag)
{
	if (!is_pass(node_coord(node)))
		node->coord = flip_coord(b, node_coord(node), flip_horiz, flip_vert, flip_diag);

	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		tree_fix_node_symmetry(b, ni, flip_horiz, flip_vert, flip_diag);
}

static void
tree_fix_symmetry(struct tree *tree, struct board *b, coord_t c)
{
	if (is_pass(c))
		return;

	struct board_symmetry *s = &tree->root_symmetry;
	int cx = coord_x(c, b), cy = coord_y(c, b);

	/* playground	X->h->v->d normalization
	 * :::..	.d...
	 * .::..	v....
	 * ..:..	.....
	 * .....	h...X
	 * .....	..... */
	bool flip_horiz = cx < s->x1 || cx > s->x2;
	bool flip_vert = cy < s->y1 || cy > s->y2;

	bool flip_diag = 0;
	if (s->d) {
		bool dir = (s->type == SYM_DIAG_DOWN);
		int x = dir ^ flip_horiz ^ flip_vert ? board_size(b) - 1 - cx : cx;
		if (flip_vert ? x < cy : x > cy) {
			flip_diag = 1;
		}
	}

	if (DEBUGL(4)) {
		fprintf(stderr, "%s [%d,%d -> %d,%d;%d,%d] will flip %d %d %d -> %s, sym %d (%d) -> %d (%d)\n",
			coord2sstr(c, b),
			cx, cy, s->x1, s->y1, s->x2, s->y2,
			flip_horiz, flip_vert, flip_diag,
			coord2sstr(flip_coord(b, c, flip_horiz, flip_vert, flip_diag), b),
			s->type, s->d, b->symmetry.type, b->symmetry.d);
	}
	if (flip_horiz || flip_vert || flip_diag)
		tree_fix_node_symmetry(b, tree->root, flip_horiz, flip_vert, flip_diag);
}

static void
tree_unlink_node(struct tree_node *node)
{
	struct tree_node *ni = node->parent;
	if (ni->children == node) {
		ni->children = node->sibling;
	} else {
		ni = ni->children;
		while (ni->sibling != node)
			ni = ni->sibling;
		ni->sibling = node->sibling;
	}
	node->sibling = NULL;
	node->parent = NULL;
}

/* Reduce weight of statistics on promotion. Remove nodes that
 * get reduced to zero playouts; returns the next node to consider
 * in the children list (@node may get deleted). */
static struct tree_node *
tree_age_node(struct tree *tree, struct tree_node *node)
{
	node->u.playouts /= tree->ltree_aging;
	if (node->parent && !node->u.playouts) {
		struct tree_node *sibling = node->sibling;
		/* Delete node, no playouts. */
		tree_unlink_node(node);
		tree_done_node(tree, node);
		return sibling;
	}

	struct tree_node *ni = node->children;
	while (ni) ni = tree_age_node(tree, ni);
	return node->sibling;
}

/* Promote the given node to be the new root of the tree. In the fast_alloc
 * mode, the node may be moved and some of its subtree may be pruned. */
void
tree_promote_node(struct tree *tree, struct tree_node **node)
{
	assert((*node)->parent == tree->root);
	tree_unlink_node(*node);
	if (!tree->nodes) {
		/* Freeing the rest of the tree can take several seconds on large
		 * trees, so we must do it asynchronously: */
		tree_done_node_detached(tree, tree->root);
	} else {
		/* Garbage collect if we run out of memory, or it is cheap to do so now: */
		if (tree->nodes_size >= tree->pruning_threshold
		    || (tree->nodes_size >= tree->max_tree_size / 10 && (*node)->u.playouts < SMALL_TREE_PLAYOUTS))
			*node = tree_garbage_collect(tree, *node);
	}
	tree->root = *node;
	tree->root_color = stone_other(tree->root_color);

	board_symmetry_update(tree->board, &tree->root_symmetry, node_coord(*node));
	tree->avg_score.playouts = 0;

	/* If the tree's deepest node was under node, or if we called tree_garbage_collect,
	 * tree->max_depth is correct. Otherwise we could traverse the tree
	 * to recompute max_depth, but it's not worth it: it's just for debugging
	 * and soon the tree will grow and max_depth will become correct again. */

	if (tree->ltree_aging != 1.0f) { // XXX: != should work here even with the floating_t
		tree_age_node(tree, tree->ltree_black);
		tree_age_node(tree, tree->ltree_white);
	}
}

bool
tree_promote_at(struct tree *tree, struct board *b, coord_t c)
{
	tree_fix_symmetry(tree, b, c);

	for (struct tree_node *ni = tree->root->children; ni; ni = ni->sibling) {
		if (node_coord(ni) == c) {
			tree_promote_node(tree, &ni);
			return true;
		}
	}
	return false;
}

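/* Rough usage sketch (illustrative; the real call sites are elsewhere in the
 * uct engine): once a move c has actually been played on the board, the
 * engine tries to reuse the matching subtree and falls back to a fresh tree
 * otherwise:
 *
 *	if (!tree_promote_at(t, b, c)) {
 *		tree_done(t);
 *		t = tree_init(b, color, max_tree_size, max_pruned_size,
 *			      pruning_threshold, ltree_aging, hbits);
 *	}
 */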