/* pachi/peepo.git: uct/tree.c
 * Distributed slave: update tree stats from increments sent by master. */

#include <assert.h>
#include <math.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define DEBUG
#include "board.h"
#include "debug.h"
#include "engine.h"
#include "move.h"
#include "playout.h"
#include "tactics.h"
#include "timeinfo.h"
#include "uct/internal.h"
#include "uct/prior.h"
#include "uct/tree.h"
#include "uct/slave.h"

/* Allocate one node in the fast_alloc mode. The returned node
 * is _not_ initialized. Returns NULL if not enough memory.
 * This function may be called by multiple threads in parallel. */
static struct tree_node *
tree_fast_alloc_node(struct tree *t)
{
	assert(t->nodes != NULL);
	struct tree_node *n = NULL;
	unsigned long old_size = __sync_fetch_and_add(&t->nodes_size, sizeof(*n));

	/* The test below works even if max_tree_size is not a
	 * multiple of the node size because tree_init() allocates
	 * space for an extra node. */
	if (old_size < t->max_tree_size)
		n = (struct tree_node *)(t->nodes + old_size);
	return n;
}
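
/* Implementation note: in fast_alloc mode the whole tree lives in one
 * pre-allocated arena (t->nodes) and tree_fast_alloc_node() is a lock-free
 * bump allocator: each caller atomically reserves sizeof(struct tree_node)
 * bytes with __sync_fetch_and_add, so concurrent threads always get
 * disjoint slots. Individual nodes are never returned to the arena; space
 * is reclaimed only wholesale, by tree_garbage_collect(). */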

/* Allocate and initialize a node. If there is not enough memory,
 * returns NULL in fast_alloc mode, or exits the program otherwise.
 * This function may be called by multiple threads in parallel. */
static struct tree_node *
tree_init_node(struct tree *t, coord_t coord, int depth, bool fast_alloc)
{
	struct tree_node *n;
	if (fast_alloc) {
		n = tree_fast_alloc_node(t);
		if (!n) return n;
		memset(n, 0, sizeof(*n));
	} else {
		n = calloc2(1, sizeof(*n));
		__sync_fetch_and_add(&t->nodes_size, sizeof(*n));
	}
	n->coord = coord;
	n->depth = depth;
	volatile static long c = 1000000;
	n->hash = __sync_fetch_and_add(&c, 1);
	if (depth > t->max_depth)
		t->max_depth = depth;
	return n;
}
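
/* Implementation note: node->hash is drawn from a shared monotonic counter
 * (starting at 1000000), which appears to serve as a unique node id rather
 * than a content-based hash. Also note that t->max_depth is updated here
 * without any synchronization. */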

/* Create a tree structure. Pre-allocate all nodes if max_tree_size is > 0. */
struct tree *
tree_init(struct board *board, enum stone color, unsigned long max_tree_size, float ltree_aging, int hbits)
{
	struct tree *t = calloc2(1, sizeof(*t));
	t->board = board;
	t->max_tree_size = max_tree_size;
	if (max_tree_size != 0) {
		/* Allocate one extra node; max_tree_size may not be a multiple of the node size. */
		t->nodes = malloc2(max_tree_size + sizeof(struct tree_node));
		/* The nodes buffer doesn't need initialization. This is currently
		 * done by tree_init_node to spread the load. Doing a memset for the
		 * entire buffer here would be too slow for large trees (>10 GB). */
	}
	/* The root PASS move is only virtual, we never play it. */
	t->root = tree_init_node(t, pass, 0, t->nodes);
	t->root_symmetry = board->symmetry;
	t->root_color = stone_other(color); // to research black moves, root will be white

	t->ltree_black = tree_init_node(t, pass, 0, false);
	t->ltree_white = tree_init_node(t, pass, 0, false);
	t->ltree_aging = ltree_aging;

	t->hbits = hbits;
	if (hbits) t->htable = uct_htable_alloc(hbits);
	return t;
}
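
/* Note that t->nodes doubles as the fast_alloc flag above: tree_init_node()
 * is passed t->nodes as its fast_alloc argument, so a tree created with
 * max_tree_size == 0 allocates its nodes individually with calloc2()
 * instead of from the arena. */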

/* This function may be called by multiple threads in parallel on the
 * same tree, but not on node n. n may be detached from the tree but
 * must have been created in this tree originally.
 * It returns the remaining size of the tree after n has been freed. */
static unsigned long
tree_done_node(struct tree *t, struct tree_node *n)
{
	struct tree_node *ni = n->children;
	while (ni) {
		struct tree_node *nj = ni->sibling;
		tree_done_node(t, ni);
		ni = nj;
	}
	free(n);
	unsigned long old_size = __sync_fetch_and_sub(&t->nodes_size, sizeof(*n));
	return old_size - sizeof(*n);
}

struct subtree_ctx {
	struct tree *t;
	struct tree_node *n;
};

/* Worker thread for tree_done_node_detached(). Only for fast_alloc=false. */
static void *
tree_done_node_worker(void *ctx_)
{
	struct subtree_ctx *ctx = ctx_;
	char *str = coord2str(ctx->n->coord, ctx->t->board);

	unsigned long tree_size = tree_done_node(ctx->t, ctx->n);
	if (!tree_size)
		free(ctx->t);
	if (DEBUGL(2))
		fprintf(stderr, "done freeing node at %s, tree size %lu\n", str, tree_size);
	free(str);
	free(ctx);
	return NULL;
}

/* Asynchronously free the subtree of nodes rooted at n. If the tree becomes
 * empty, free the tree as well. Only for fast_alloc=false. */
static void
tree_done_node_detached(struct tree *t, struct tree_node *n)
{
	if (n->u.playouts < 1000) { // no thread for small tree
		if (!tree_done_node(t, n))
			free(t);
		return;
	}
	pthread_attr_t attr;
	pthread_attr_init(&attr);
	pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

	pthread_t thread;
	struct subtree_ctx *ctx = malloc2(sizeof(struct subtree_ctx));
	ctx->t = t;
	ctx->n = n;
	pthread_create(&thread, &attr, tree_done_node_worker, ctx);
	pthread_attr_destroy(&attr);
}

void
tree_done(struct tree *t)
{
	tree_done_node(t, t->ltree_black);
	tree_done_node(t, t->ltree_white);

	if (t->htable) free(t->htable);
	if (t->nodes) {
		free(t->nodes);
		free(t);
	} else if (!tree_done_node(t, t->root)) {
		free(t);
		/* A tree_done_node_worker might still be running on this tree but
		 * it will free the tree later. It is also freeing nodes faster than
		 * we will create new ones. */
	}
}

static void
tree_node_dump(struct tree *tree, struct tree_node *node, int l, int thres)
{
	for (int i = 0; i < l; i++) fputc(' ', stderr);
	int children = 0;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		children++;
	/* We use 1 as parity, since for all nodes we want to know the
	 * win probability of _us_, not the node color. */
	fprintf(stderr, "[%s] %f %% %d [prior %f %% %d amaf %f %% %d]; hints %x; %d children <%"PRIhash">\n",
		coord2sstr(node->coord, tree->board),
		tree_node_get_value(tree, 1, node->u.value), node->u.playouts,
		tree_node_get_value(tree, 1, node->prior.value), node->prior.playouts,
		tree_node_get_value(tree, 1, node->amaf.value), node->amaf.playouts,
		node->hints, children, node->hash);

	/* Print nodes sorted by #playouts. */

	struct tree_node *nbox[1000]; int nboxl = 0;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		if (ni->u.playouts > thres)
			nbox[nboxl++] = ni;

	while (true) {
		int best = -1;
		for (int i = 0; i < nboxl; i++)
			if (nbox[i] && (best < 0 || nbox[i]->u.playouts > nbox[best]->u.playouts))
				best = i;
		if (best < 0)
			break;
		tree_node_dump(tree, nbox[best], l + 1, /* node->u.value < 0.1 ? 0 : */ thres);
		nbox[best] = NULL;
	}
}

void
tree_dump(struct tree *tree, int thres)
{
	if (thres && tree->root->u.playouts / thres > 100) {
		/* Be a bit sensible about this; the opening book can create
		 * huge dumps at first. */
		thres = tree->root->u.playouts / 100 * (thres < 1000 ? 1 : thres / 1000);
	}
	fprintf(stderr, "(UCT tree; root %s; extra komi %f)\n",
		stone2str(tree->root_color), tree->extra_komi);
	tree_node_dump(tree, tree->root, 0, thres);

	if (DEBUGL(3) && tree->ltree_black) {
		fprintf(stderr, "B local tree:\n");
		tree_node_dump(tree, tree->ltree_black, 0, thres);
		fprintf(stderr, "W local tree:\n");
		tree_node_dump(tree, tree->ltree_white, 0, thres);
	}
}

static char *
tree_book_name(struct board *b)
{
	static char buf[256];
	if (b->handicap > 0) {
		sprintf(buf, "uctbook-%d-%02.01f-h%d.pachitree", b->size - 2, b->komi, b->handicap);
	} else {
		sprintf(buf, "uctbook-%d-%02.01f.pachitree", b->size - 2, b->komi);
	}
	return buf;
}
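
/* Note: tree_book_name() formats the filename into a static buffer, so the
 * returned pointer is only valid until the next call and the function is
 * not reentrant. */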

static void
tree_node_save(FILE *f, struct tree_node *node, int thres)
{
	bool save_children = node->u.playouts >= thres;

	if (!save_children)
		node->is_expanded = 0;

	fputc(1, f);
	fwrite(((void *) node) + offsetof(struct tree_node, depth),
	       sizeof(struct tree_node) - offsetof(struct tree_node, depth),
	       1, f);

	if (save_children) {
		for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
			tree_node_save(f, ni, thres);
	} else {
		if (node->children)
			node->is_expanded = 1;
	}

	fputc(0, f);
}
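
/* The book format written above is, per node: a 0x01 marker byte, the raw
 * struct tree_node contents from the depth field onward, the node's saved
 * children (recursively), and a terminating 0x00 byte. Being a raw struct
 * dump, the format is tied to the compiler's struct layout and the host
 * endianness. */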

void
tree_save(struct tree *tree, struct board *b, int thres)
{
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "wb");
	if (!f) {
		perror("fopen");
		return;
	}
	tree_node_save(f, tree->root, thres);
	fputc(0, f);
	fclose(f);
}

void
tree_node_load(FILE *f, struct tree_node *node, int *num)
{
	(*num)++;

	fread(((void *) node) + offsetof(struct tree_node, depth),
	      sizeof(struct tree_node) - offsetof(struct tree_node, depth),
	      1, f);

	/* Keep values in sane scale, otherwise we start overflowing. */
#define MAX_PLAYOUTS	10000000
	if (node->u.playouts > MAX_PLAYOUTS) {
		node->u.playouts = MAX_PLAYOUTS;
	}
	if (node->amaf.playouts > MAX_PLAYOUTS) {
		node->amaf.playouts = MAX_PLAYOUTS;
	}
	memcpy(&node->pamaf, &node->amaf, sizeof(node->amaf));
	memcpy(&node->pu, &node->u, sizeof(node->u));

	struct tree_node *ni = NULL, *ni_prev = NULL;
	while (fgetc(f)) {
		ni_prev = ni; ni = calloc2(1, sizeof(*ni));
		if (!node->children)
			node->children = ni;
		else
			ni_prev->sibling = ni;
		ni->parent = node;
		tree_node_load(f, ni, num);
	}
}

void
tree_load(struct tree *tree, struct board *b)
{
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "rb");
	if (!f)
		return;

	fprintf(stderr, "Loading opening book %s...\n", filename);

	int num = 0;
	if (fgetc(f))
		tree_node_load(f, tree->root, &num);
	fprintf(stderr, "Loaded %d nodes.\n", num);

	fclose(f);
}
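
/* Note: loading trusts the file contents; the fread() return values in
 * tree_node_load() are not checked, so a truncated or incompatible book
 * yields undefined node data rather than an error. */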

static struct tree_node *
tree_node_copy(struct tree_node *node)
{
	struct tree_node *n2 = malloc2(sizeof(*n2));
	*n2 = *node;
	if (!node->children)
		return n2;
	struct tree_node *ni = node->children;
	struct tree_node *ni2 = tree_node_copy(ni);
	n2->children = ni2; ni2->parent = n2;
	while ((ni = ni->sibling)) {
		ni2->sibling = tree_node_copy(ni);
		ni2 = ni2->sibling; ni2->parent = n2;
	}
	return n2;
}

struct tree *
tree_copy(struct tree *tree)
{
	assert(!tree->nodes);
	struct tree *t2 = malloc2(sizeof(*t2));
	*t2 = *tree;
	t2->root = tree_node_copy(tree->root);
	return t2;
}

/* Copy the subtree rooted at node: all nodes at or below depth
 * or with at least threshold playouts. Only for fast_alloc.
 * The code is destructive on src. The relative order of children of
 * a given node is preserved (assumed by tree_get_node in particular).
 * Returns the copy of node in the destination tree, or NULL
 * if we could not copy it. */
static struct tree_node *
tree_prune(struct tree *dest, struct tree *src, struct tree_node *node,
	   int threshold, int depth)
{
	assert(dest->nodes && node);
	struct tree_node *n2 = tree_fast_alloc_node(dest);
	if (!n2)
		return NULL;
	*n2 = *node;
	if (n2->depth > dest->max_depth)
		dest->max_depth = n2->depth;
	n2->children = NULL;
	n2->is_expanded = false;

	if (node->depth >= depth && node->u.playouts < threshold)
		return n2;
	/* For deep nodes with many playouts, we must copy all children,
	 * even those with zero playouts, because partially expanded
	 * nodes are not supported. Considering them as fully expanded
	 * would degrade the playing strength. The only exception is
	 * when dest becomes full, but this should never happen in practice
	 * if threshold is chosen to limit the number of nodes traversed. */
	struct tree_node *ni = node->children;
	if (!ni)
		return n2;
	struct tree_node **prev2 = &(n2->children);
	while (ni) {
		struct tree_node *ni2 = tree_prune(dest, src, ni, threshold, depth);
		if (!ni2) break;
		*prev2 = ni2;
		prev2 = &(ni2->sibling);
		ni2->parent = n2;
		ni = ni->sibling;
	}
	if (!ni) {
		n2->is_expanded = true;
	} else {
		n2->children = NULL; // avoid partially expanded nodes
	}
	return n2;
}

/* The following constants are used for garbage collection of nodes.
 * A tree is considered large if the top node has >= 40K playouts.
 * For such trees, we copy deep nodes only if they have enough
 * playouts, with a gradually increasing threshold up to 40.
 * These constants define how much time we're willing to spend
 * scanning the source tree when promoting a move. The chosen values
 * keep worst-case pruning at about 3 s with 20 GB of RAM, and that only
 * occurs for long thinking times (>1M playouts); for fast games the
 * trees don't grow large, and for small RAM or fast games we copy the
 * entire tree. These values do not degrade playing strength and are
 * necessary to avoid losing on time; increasing DEEP_PLAYOUTS_THRESHOLD
 * or decreasing LARGE_TREE_PLAYOUTS would make the program faster but
 * play worse. */
#define LARGE_TREE_PLAYOUTS 40000LL
#define DEEP_PLAYOUTS_THRESHOLD 40

/* Garbage collect the tree early if the top node has < 5K playouts,
 * to avoid having to do it later on a large subtree.
 * This guarantees garbage collection in < 1s. */
#define SMALL_TREE_PLAYOUTS 5000
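
/* For example, with the constants above tree_garbage_collect() below keeps
 * descending below a node deeper than max_depth only if that node has at
 * least
 *   threshold = (root playouts - LARGE_TREE_PLAYOUTS)
 *               * DEEP_PLAYOUTS_THRESHOLD / LARGE_TREE_PLAYOUTS
 * playouts, clamped to [0, DEEP_PLAYOUTS_THRESHOLD]: a 60000-playout root
 * gives threshold (60000 - 40000) * 40 / 40000 = 20, anything up to 40000
 * playouts gives 0 (copy everything), and 80000 or more gives the maximum
 * of 40. */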

/* Free the entire tree, keeping only the subtree rooted at node.
 * Prune the subtree if necessary to fit in max_size bytes or
 * to save time scanning the tree.
 * Returns the moved node. Only for fast_alloc. */
struct tree_node *
tree_garbage_collect(struct tree *tree, unsigned long max_size, struct tree_node *node)
{
	assert(tree->nodes && !node->parent && !node->sibling);
	double start_time = time_now();

	struct tree *temp_tree = tree_init(tree->board, tree->root_color, max_size, tree->ltree_aging, 0);
	temp_tree->nodes_size = 0; // We do not want the dummy pass node
	struct tree_node *temp_node;

	/* Find the maximum depth at which we can copy all nodes. */
	int max_nodes = 1;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		max_nodes++;
	unsigned long nodes_size = max_nodes * sizeof(*node);
	int max_depth = node->depth;
	while (nodes_size < max_size && max_nodes > 1) {
		max_nodes--;
		nodes_size += max_nodes * nodes_size;
		max_depth++;
	}

	/* Copy all nodes for small trees. For large trees, copy all nodes
	 * with depth <= max_depth, and all nodes with enough playouts.
	 * We avoid going too deep (except for nodes with many playouts), mostly
	 * to save time scanning the source tree. It can take over 20 s to
	 * completely traverse a large source tree (20 GB) even without copying,
	 * because the traversal is very unfriendly to the memory cache. */
	int threshold = (node->u.playouts - LARGE_TREE_PLAYOUTS) * DEEP_PLAYOUTS_THRESHOLD / LARGE_TREE_PLAYOUTS;
	if (threshold < 0) threshold = 0;
	if (threshold > DEEP_PLAYOUTS_THRESHOLD) threshold = DEEP_PLAYOUTS_THRESHOLD;
	temp_node = tree_prune(temp_tree, tree, node, threshold, max_depth);
	assert(temp_node);

	/* Now copy back to original tree. */
	tree->nodes_size = 0;
	tree->max_depth = 0;
	struct tree_node *new_node = tree_prune(tree, temp_tree, temp_node, 0, temp_tree->max_depth);

	if (DEBUGL(1)) {
		double now = time_now();
		static double prev_time;
		if (!prev_time) prev_time = start_time;
		fprintf(stderr,
			"tree pruned in %0.6g s, prev %0.3g s ago, dest depth %d wanted %d,"
			" max_size %lu, pruned size %lu, playouts %d\n",
			now - start_time, start_time - prev_time, temp_tree->max_depth, max_depth,
			max_size, temp_tree->nodes_size, new_node->u.playouts);
		prev_time = start_time;
	}
	if (temp_tree->nodes_size >= temp_tree->max_tree_size) {
		fprintf(stderr, "temp tree overflow, increase max_tree_size %lu or MIN_FREE_MEM_PERCENT %llu\n",
			tree->max_tree_size, MIN_FREE_MEM_PERCENT);
	} else {
		assert(tree->nodes_size == temp_tree->nodes_size);
		assert(tree->max_depth == temp_tree->max_depth);
	}
	tree_done(temp_tree);
	return new_node;
}
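
/* The collection above is effectively a two-pass compaction: the surviving
 * subtree is first pruned into temp_tree (a scratch arena of max_size
 * bytes), then pruned back into the original arena after resetting
 * tree->nodes_size, which leaves the kept nodes densely packed at the
 * start of tree->nodes. */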

static void
tree_node_merge(struct tree_node *dest, struct tree_node *src)
{
	/* Do not merge nodes that weren't touched at all. */
	assert(dest->pamaf.playouts == src->pamaf.playouts);
	assert(dest->pu.playouts == src->pu.playouts);
	if (src->amaf.playouts - src->pamaf.playouts == 0
	    && src->u.playouts - src->pu.playouts == 0) {
		return;
	}

	dest->hints |= src->hints;

	/* Merge the children, both are coord-sorted lists. */
	struct tree_node *di = dest->children, **dref = &dest->children;
	struct tree_node *si = src->children, **sref = &src->children;
	while (di && si) {
		if (di->coord != si->coord) {
			/* src has some extra items or misses di */
			struct tree_node *si2 = si->sibling;
			while (si2 && di->coord != si2->coord) {
				si2 = si2->sibling;
			}
			if (!si2)
				goto next_di; /* src misses di, move on */
			/* chain the extra [si,si2) items before di */
			(*dref) = si;
			while (si->sibling != si2) {
				si->parent = dest;
				si = si->sibling;
			}
			si->parent = dest;
			si->sibling = di;
			si = si2;
			(*sref) = si;
		}
		/* Matching nodes - recurse... */
		tree_node_merge(di, si);
		/* ...and move on. */
		sref = &si->sibling; si = si->sibling;
next_di:
		dref = &di->sibling; di = di->sibling;
	}
	if (si) {
		/* Some outstanding nodes are left on src side, rechain
		 * them to dst. */
		(*dref) = si;
		while (si) {
			si->parent = dest;
			si = si->sibling;
		}
		(*sref) = NULL;
	}

	/* Priors should be constant. */
	assert(dest->prior.playouts == src->prior.playouts && dest->prior.value == src->prior.value);

	stats_merge(&dest->amaf, &src->amaf);
	stats_merge(&dest->u, &src->u);
}
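
/* The merge above assumes both children lists are sorted by coordinate
 * (the order produced by tree_expand_node() and preserved by
 * tree_get_node() and tree_prune()), and it moves unmatched src children
 * into dest rather than copying them, which is why tree_merge() is
 * destructive on src. */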

/* Merge two trees built upon the same board. Note that the operation is
 * destructive on src. */
void
tree_merge(struct tree *dest, struct tree *src)
{
	if (src->max_depth > dest->max_depth)
		dest->max_depth = src->max_depth;
	tree_node_merge(dest->root, src->root);
}

static void
tree_node_normalize(struct tree_node *node, int factor)
{
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		tree_node_normalize(ni, factor);

#define normalize(s1, s2, t) node->s2.t = node->s1.t + (node->s2.t - node->s1.t) / factor;
	normalize(pamaf, amaf, playouts);
	memcpy(&node->pamaf, &node->amaf, sizeof(node->amaf));

	normalize(pu, u, playouts);
	memcpy(&node->pu, &node->u, sizeof(node->u));
#undef normalize
}
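
/* The normalize() macro above keeps the full weight of the results that
 * were already present at the last backup (pamaf/pu) and divides only the
 * increment gained since then:
 *   new = prev + (cur - prev) / factor
 * after which the backup is refreshed to the new value. */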

/* Normalize a tree, dividing the amaf and u values by the given factor;
 * this compensates for simulations that were run in independent threads
 * on trees built upon the same board, which would otherwise be
 * over-counted after merging. Results taken from a previous simulation
 * run are backed up in the tree (pamaf, pu) so that only the new
 * increment gets scaled. */
void
tree_normalize(struct tree *tree, int factor)
{
	tree_node_normalize(tree->root, factor);
}

/* Get a node of given coordinate from within parent, possibly creating it
 * if necessary - in a very raw form (no .d, priors, ...). */
/* FIXME: Adjust for board symmetry. */
struct tree_node *
tree_get_node(struct tree *t, struct tree_node *parent, coord_t c, bool create)
{
	if (!parent->children || parent->children->coord >= c) {
		/* Special case: Insertion at the beginning. */
		if (parent->children && parent->children->coord == c)
			return parent->children;
		if (!create)
			return NULL;

		struct tree_node *nn = tree_init_node(t, c, parent->depth + 1, false);
		nn->parent = parent; nn->sibling = parent->children;
		parent->children = nn;
		return nn;
	}

	/* No candidate at the beginning, look through all the children. */

	struct tree_node *ni;
	for (ni = parent->children; ni->sibling; ni = ni->sibling)
		if (ni->sibling->coord >= c)
			break;

	if (ni->sibling && ni->sibling->coord == c)
		return ni->sibling;
	assert(ni->coord < c);
	if (!create)
		return NULL;

	struct tree_node *nn = tree_init_node(t, c, parent->depth + 1, false);
	nn->parent = parent; nn->sibling = ni->sibling; ni->sibling = nn;
	return nn;
}
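
/* Note: tree_get_node() keeps each children list sorted by coordinate,
 * both for head insertion and for insertion in the middle;
 * tree_node_merge() depends on this ordering and tree_prune() explicitly
 * preserves it. */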

/* Get the local tree node corresponding to the given node, given a local
 * node child iterator @lni (which points either at the corresponding node,
 * or at the nearest local tree node after @ni). */
struct tree_node *
tree_lnode_for_node(struct tree *tree, struct tree_node *ni, struct tree_node *lni, int tenuki_d)
{
	/* Now set up lnode, which is the actual local node
	 * corresponding to ni - either lni if it is an
	 * exact match and ni is not tenuki, <pass> local
	 * node if ni is tenuki, or NULL if there is no
	 * corresponding node available. */

	if (is_pass(ni->coord)) {
		/* Also, for sanity reasons we never use local
		 * tree for passes. (Maybe we could, but it's
		 * too hard to think about.) */
		return NULL;
	}

	if (lni->coord == ni->coord) {
		/* We don't treat a sequence play that we already have in the
		 * local tree as tenuki, even though ni->d is too high; this
		 * can happen if the sequence occurred in a different board
		 * topology. */
		return lni;
	}

	if (ni->d >= tenuki_d) {
		/* Tenuki, pick a pass lsibling if available. */
		assert(lni->parent && lni->parent->children);
		if (is_pass(lni->parent->children->coord)) {
			return lni->parent->children;
		} else {
			return NULL;
		}
	}

	/* No corresponding local node, lnode stays NULL. */
	return NULL;
}

/* Tree symmetry: When possible, we will localize the tree to a single part
 * of the board in tree_expand_node() and possibly flip along symmetry axes
 * to another part of the board in tree_promote_at(). We follow b->symmetry
 * guidelines here. */

/* This function must be thread safe, given that board b is only modified by the calling thread. */
void
tree_expand_node(struct tree *t, struct tree_node *node, struct board *b, enum stone color, struct uct *u, int parity)
{
	/* Get a Common Fate Graph distance map from parent node. */
	int distances[board_size2(b)];
	if (!is_pass(b->last_move.coord) && !is_resign(b->last_move.coord)) {
		cfg_distances(b, node->coord, distances, TREE_NODE_D_MAX);
	} else {
		// Pass or resign - everything is too far.
		foreach_point(b) { distances[c] = TREE_NODE_D_MAX + 1; } foreach_point_end;
	}

	/* Get a map of prior values to initialize the new nodes with. */
	struct prior_map map = {
		.b = b,
		.to_play = color,
		.parity = tree_parity(t, parity),
		.distances = distances,
	};
	// Include pass in the prior map.
	struct move_stats map_prior[board_size2(b) + 1]; map.prior = &map_prior[1];
	bool map_consider[board_size2(b) + 1]; map.consider = &map_consider[1];
	memset(map_prior, 0, sizeof(map_prior));
	memset(map_consider, 0, sizeof(map_consider));
	map.consider[pass] = true;
	foreach_point(b) {
		if (board_at(b, c) != S_NONE)
			continue;
		if (!board_is_valid_play(b, color, c))
			continue;
		map.consider[c] = true;
	} foreach_point_end;
	uct_prior(u, node, &map);

	/* Now, create the nodes. */
	struct tree_node *ni = tree_init_node(t, pass, node->depth + 1, t->nodes);
	/* In fast_alloc mode we might temporarily run out of nodes but
	 * this should be rare if MIN_FREE_MEM_PERCENT is set correctly. */
	if (!ni) {
		node->is_expanded = false;
		return;
	}
	struct tree_node *first_child = ni;
	ni->parent = node;
	ni->prior = map.prior[pass]; ni->d = TREE_NODE_D_MAX + 1;

	/* The loop considers only the symmetry playground. */
	if (UDEBUGL(6)) {
		fprintf(stderr, "expanding %s within [%d,%d],[%d,%d] %d-%d\n",
			coord2sstr(node->coord, b),
			b->symmetry.x1, b->symmetry.y1,
			b->symmetry.x2, b->symmetry.y2,
			b->symmetry.type, b->symmetry.d);
	}
	for (int j = b->symmetry.y1; j <= b->symmetry.y2; j++) {
		for (int i = b->symmetry.x1; i <= b->symmetry.x2; i++) {
			if (b->symmetry.d) {
				int x = b->symmetry.type == SYM_DIAG_DOWN ? board_size(b) - 1 - i : i;
				if (x > j) {
					if (UDEBUGL(7))
						fprintf(stderr, "drop %d,%d\n", i, j);
					continue;
				}
			}

			coord_t c = coord_xy(t->board, i, j);
			if (!map.consider[c]) // Filter out invalid moves
				continue;
			assert(c != node->coord); // I have spotted "C3 C3" in some sequence...

			struct tree_node *nj = tree_init_node(t, c, node->depth + 1, t->nodes);
			if (!nj) {
				node->is_expanded = false;
				return;
			}
			nj->parent = node; ni->sibling = nj; ni = nj;

			ni->prior = map.prior[c];
			ni->d = distances[c];
		}
	}
	node->children = first_child; // must be done at the end to avoid race
}
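
/* Concurrency note: the new siblings are fully linked together before
 * node->children is finally set, so readers racing with the expansion see
 * either no children at all or the complete list, never a partial one. */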

static coord_t
flip_coord(struct board *b, coord_t c,
	   bool flip_horiz, bool flip_vert, int flip_diag)
{
	int x = coord_x(c, b), y = coord_y(c, b);
	if (flip_diag) {
		int z = x; x = y; y = z;
	}
	if (flip_horiz) {
		x = board_size(b) - 1 - x;
	}
	if (flip_vert) {
		y = board_size(b) - 1 - y;
	}
	return coord_xy(b, x, y);
}

static void
tree_fix_node_symmetry(struct board *b, struct tree_node *node,
		       bool flip_horiz, bool flip_vert, int flip_diag)
{
	if (!is_pass(node->coord))
		node->coord = flip_coord(b, node->coord, flip_horiz, flip_vert, flip_diag);

	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		tree_fix_node_symmetry(b, ni, flip_horiz, flip_vert, flip_diag);
}

static void
tree_fix_symmetry(struct tree *tree, struct board *b, coord_t c)
{
	if (is_pass(c))
		return;

	struct board_symmetry *s = &tree->root_symmetry;
	int cx = coord_x(c, b), cy = coord_y(c, b);

	/* playground	X->h->v->d normalization
	 * :::..	.d...
	 * .::..	v....
	 * ..:..	.....
	 * .....	h...X
	 * .....	..... */
	bool flip_horiz = cx < s->x1 || cx > s->x2;
	bool flip_vert = cy < s->y1 || cy > s->y2;

	bool flip_diag = 0;
	if (s->d) {
		bool dir = (s->type == SYM_DIAG_DOWN);
		int x = dir ^ flip_horiz ^ flip_vert ? board_size(b) - 1 - cx : cx;
		if (flip_vert ? x < cy : x > cy) {
			flip_diag = 1;
		}
	}

	if (DEBUGL(4)) {
		fprintf(stderr, "%s [%d,%d -> %d,%d;%d,%d] will flip %d %d %d -> %s, sym %d (%d) -> %d (%d)\n",
			coord2sstr(c, b),
			cx, cy, s->x1, s->y1, s->x2, s->y2,
			flip_horiz, flip_vert, flip_diag,
			coord2sstr(flip_coord(b, c, flip_horiz, flip_vert, flip_diag), b),
			s->type, s->d, b->symmetry.type, b->symmetry.d);
	}
	if (flip_horiz || flip_vert || flip_diag)
		tree_fix_node_symmetry(b, tree->root, flip_horiz, flip_vert, flip_diag);
}

static void
tree_unlink_node(struct tree_node *node)
{
	struct tree_node *ni = node->parent;
	if (ni->children == node) {
		ni->children = node->sibling;
	} else {
		ni = ni->children;
		while (ni->sibling != node)
			ni = ni->sibling;
		ni->sibling = node->sibling;
	}
	node->sibling = NULL;
	node->parent = NULL;
}

/* Reduce the weight of statistics on promotion. Remove nodes that
 * get reduced to zero playouts; returns the next node to consider
 * in the children list (@node may get deleted). */
static struct tree_node *
tree_age_node(struct tree *tree, struct tree_node *node)
{
	node->u.playouts /= tree->ltree_aging;
	if (node->parent && !node->u.playouts) {
		struct tree_node *sibling = node->sibling;
		/* Delete node, no playouts. */
		tree_unlink_node(node);
		tree_done_node(tree, node);
		return sibling;
	}

	struct tree_node *ni = node->children;
	while (ni) ni = tree_age_node(tree, ni);
	return node->sibling;
}

/* Promote the given node to be the root of the tree. In the fast_alloc
 * mode, the node may be moved and some of its subtree may be pruned. */
void
tree_promote_node(struct tree *tree, struct tree_node **node)
{
	assert((*node)->parent == tree->root);
	tree_unlink_node(*node);
	if (!tree->nodes) {
		/* Freeing the rest of the tree can take several seconds on large
		 * trees, so we must do it asynchronously: */
		tree_done_node_detached(tree, tree->root);
	} else {
		/* Garbage collect if we run out of memory, or it is cheap to do so now: */
		unsigned long min_free_size = (MIN_FREE_MEM_PERCENT * tree->max_tree_size) / 100;
		if (tree->nodes_size >= tree->max_tree_size - min_free_size
		    || (tree->nodes_size >= min_free_size && (*node)->u.playouts < SMALL_TREE_PLAYOUTS))
			*node = tree_garbage_collect(tree, min_free_size, *node);
	}
	tree->root = *node;
	tree->root_color = stone_other(tree->root_color);

	board_symmetry_update(tree->board, &tree->root_symmetry, (*node)->coord);
	/* See the tree.score description for an explanation of why we don't
	 * zero the score on node promotion. */
	// tree->score.playouts = 0;

	/* If the tree's deepest node was under node, or if we called
	 * tree_garbage_collect(), tree->max_depth is correct. Otherwise we
	 * could traverse the tree to recompute max_depth, but it's not worth
	 * it: it's just for debugging and soon the tree will grow and
	 * max_depth will become correct again. */

	if (tree->ltree_aging != 1.0f) { // XXX: != should work here even with the float
		tree_age_node(tree, tree->ltree_black);
		tree_age_node(tree, tree->ltree_white);
	}
}

bool
tree_promote_at(struct tree *tree, struct board *b, coord_t c)
{
	tree_fix_symmetry(tree, b, c);

	for (struct tree_node *ni = tree->root->children; ni; ni = ni->sibling) {
		if (ni->coord == c) {
			tree_promote_node(tree, &ni);
			return true;
		}
	}
	return false;
}