#include "uct/internal.h"
#include "uct/prior.h"
/* Allocate one node in the fast_alloc mode. The returned node
 * is _not_ initialized. Returns NULL if not enough memory.
 * This function may be called by multiple threads in parallel. */
static struct tree_node *
tree_fast_alloc_node(struct tree *t)
{
	assert(t->nodes != NULL);
	struct tree_node *n = NULL;
	unsigned long old_size = __sync_fetch_and_add(&t->nodes_size, sizeof(*n));

	/* The test below works even if max_tree_size is not a
	 * multiple of the node size because tree_init() allocates
	 * space for an extra node. */
	if (old_size < t->max_tree_size)
		n = (struct tree_node *)(t->nodes + old_size);
	return n;
}
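/* Illustrative note: this is a lock-free bump allocator. Each thread's
 * __sync_fetch_and_add() reserves a private [old_size, old_size + sizeof(*n))
 * slice, so parallel callers never receive overlapping memory. The extra node
 * malloc()ed by tree_init() covers the boundary case where old_size is still
 * below max_tree_size but old_size + sizeof(*n) runs past it. */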
/* Allocate and initialize a node. Returns NULL (fast_alloc mode)
 * or exits the main program if not enough memory.
 * This function may be called by multiple threads in parallel. */
static struct tree_node *
tree_init_node(struct tree *t, coord_t coord, int depth, bool fast_alloc)
{
	struct tree_node *n;
	if (fast_alloc) {
		n = tree_fast_alloc_node(t);
		if (!n) return NULL;
		memset(n, 0, sizeof(*n));
	} else {
		n = calloc(1, sizeof(*n));
		if (!n) {
			fprintf(stderr, "tree_init_node(): OUT OF MEMORY\n");
			exit(1);
		}
		__sync_fetch_and_add(&t->nodes_size, sizeof(*n));
	}

	n->coord = coord;
	n->depth = depth;
	volatile static long c = 1000000;
	n->hash = __sync_fetch_and_add(&c, 1);
	if (depth > t->max_depth)
		t->max_depth = depth;
	return n;
}
/* Create a tree structure. Pre-allocate all nodes if max_tree_size is > 0. */
struct tree *
tree_init(struct board *board, enum stone color, unsigned long max_tree_size, float ltree_aging)
{
	struct tree *t = calloc(1, sizeof(*t));
	t->board = board;
	t->max_tree_size = max_tree_size;
	if (max_tree_size != 0) {
		/* Allocate one extra node, max_tree_size may not be a multiple of the node size. */
		t->nodes = malloc(max_tree_size + sizeof(struct tree_node));
		/* The nodes buffer doesn't need initialization. This is currently
		 * done by tree_init_node to spread the load. Doing a memset for the
		 * entire buffer here would be too slow for large trees (>10 GB). */
		if (!t->nodes) {
			fprintf(stderr, "tree_init(): OUT OF MEMORY\n");
			exit(1);
		}
	}
	/* The root PASS move is only virtual, we never play it. */
	t->root = tree_init_node(t, pass, 0, t->nodes);
	t->root_symmetry = board->symmetry;
	t->root_color = stone_other(color); // to research black moves, root will be white

	t->ltree_black = tree_init_node(t, pass, 0, false);
	t->ltree_white = tree_init_node(t, pass, 0, false);
	t->ltree_aging = ltree_aging;

	return t;
}
/* This function may be called by multiple threads in parallel on the
 * same tree, but not on node n. n may be detached from the tree but
 * must have been created in this tree originally.
 * It returns the remaining size of the tree after n has been freed. */
static unsigned long
tree_done_node(struct tree *t, struct tree_node *n)
{
	struct tree_node *ni = n->children;
	while (ni) {
		struct tree_node *nj = ni->sibling;
		tree_done_node(t, ni);
		ni = nj;
	}
	free(n);
	unsigned long old_size = __sync_fetch_and_sub(&t->nodes_size, sizeof(*n));
	return old_size - sizeof(*n);
}
struct subtree_ctx {
	struct tree *t;
	struct tree_node *n;
};

/* Worker thread for tree_done_node_detached(). Only for fast_alloc=false. */
static void *
tree_done_node_worker(void *ctx_)
{
	struct subtree_ctx *ctx = ctx_;
	char *str = coord2str(ctx->n->coord, ctx->t->board);

	unsigned long tree_size = tree_done_node(ctx->t, ctx->n);
	if (!tree_size)
		free(ctx->t);
	if (DEBUGL(2))
		fprintf(stderr, "done freeing node at %s, tree size %lu\n", str, tree_size);
	free(str);
	free(ctx);
	return NULL;
}
/* Asynchronously free the subtree of nodes rooted at n. If the tree becomes
 * empty, free the tree also. Only for fast_alloc=false. */
static void
tree_done_node_detached(struct tree *t, struct tree_node *n)
{
	if (n->u.playouts < 1000) { // no thread for small tree
		if (!tree_done_node(t, n))
			free(t);
		return;
	}
	pthread_attr_t attr;
	pthread_attr_init(&attr);
	pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

	pthread_t thread;
	struct subtree_ctx *ctx = malloc(sizeof(struct subtree_ctx));
	if (!ctx) {
		fprintf(stderr, "tree_done_node_detached(): OUT OF MEMORY\n");
		exit(1);
	}
	ctx->t = t;
	ctx->n = n;
	pthread_create(&thread, &attr, tree_done_node_worker, ctx);
	pthread_attr_destroy(&attr);
}
void
tree_done(struct tree *t)
{
	tree_done_node(t, t->ltree_black);
	tree_done_node(t, t->ltree_white);

	if (t->nodes) {
		free(t->nodes);
		free(t);
	} else if (!tree_done_node(t, t->root)) {
		free(t);
		/* A tree_done_node_worker might still be running on this tree but
		 * it will free the tree later. It is also freeing nodes faster than
		 * we will create new ones. */
	}
}
static void
tree_node_dump(struct tree *tree, struct tree_node *node, int l, int thres)
{
	for (int i = 0; i < l; i++) fputc(' ', stderr);
	int children = 0;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		children++;
	/* We use 1 as parity, since for all nodes we want to know the
	 * win probability of _us_, not the node color. */
	fprintf(stderr, "[%s] %f %% %d [prior %f %% %d amaf %f %% %d]; hints %x; %d children <%"PRIhash">\n",
		coord2sstr(node->coord, tree->board),
		tree_node_get_value(tree, 1, node->u.value), node->u.playouts,
		tree_node_get_value(tree, 1, node->prior.value), node->prior.playouts,
		tree_node_get_value(tree, 1, node->amaf.value), node->amaf.playouts,
		node->hints, children, node->hash);

	/* Print nodes sorted by #playouts. */

	struct tree_node *nbox[1000]; int nboxl = 0;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		if (ni->u.playouts > thres)
			nbox[nboxl++] = ni;

	while (true) {
		int best = -1;
		for (int i = 0; i < nboxl; i++)
			if (nbox[i] && (best < 0 || nbox[i]->u.playouts > nbox[best]->u.playouts))
				best = i;
		if (best < 0)
			break;
		tree_node_dump(tree, nbox[best], l + 1, /* node->u.value < 0.1 ? 0 : */ thres);
		nbox[best] = NULL;
	}
}
void
tree_dump(struct tree *tree, int thres)
{
	if (thres && tree->root->u.playouts / thres > 100) {
		/* Be a bit sensible about this; the opening book can create
		 * huge dumps at first. */
		thres = tree->root->u.playouts / 100 * (thres < 1000 ? 1 : thres / 1000);
	}
	fprintf(stderr, "(UCT tree; root %s; extra komi %f; avg score %f/%d)\n",
		stone2str(tree->root_color), tree->extra_komi,
		tree->score.value, tree->score.playouts);
	tree_node_dump(tree, tree->root, 0, thres);

	if (DEBUGL(3) && tree->ltree_black) {
		fprintf(stderr, "B local tree:\n");
		tree_node_dump(tree, tree->ltree_black, 0, thres);
		fprintf(stderr, "W local tree:\n");
		tree_node_dump(tree, tree->ltree_white, 0, thres);
	}
}
static char *
tree_book_name(struct board *b)
{
	static char buf[256];
	if (b->handicap > 0) {
		sprintf(buf, "uctbook-%d-%02.01f-h%d.pachitree", b->size - 2, b->komi, b->handicap);
	} else {
		sprintf(buf, "uctbook-%d-%02.01f.pachitree", b->size - 2, b->komi);
	}
	return buf;
}
static void
tree_node_save(FILE *f, struct tree_node *node, int thres)
{
	bool save_children = node->u.playouts >= thres;

	if (!save_children)
		node->is_expanded = 0;

	fputc(1, f);
	fwrite(((void *) node) + offsetof(struct tree_node, depth),
	       sizeof(struct tree_node) - offsetof(struct tree_node, depth),
	       1, f);

	if (save_children) {
		for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
			tree_node_save(f, ni, thres);
	} else {
		if (node->children)
			node->is_expanded = 1;
	}

	fputc(0, f);
}
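/* Sketch (hypothetical, not part of the build): the record written above is
 * simply the tail of struct tree_node starting at the `depth' member, so
 * anything laid out before `depth' - such as the link pointers - is skipped
 * and rebuilt by tree_node_load(). The same offsetof() trick in isolation: */
#if 0
#include <stddef.h>
#include <stdio.h>

struct rec {
	struct rec *next;	/* transient link - never serialized */
	int depth;		/* everything from here on goes to disk */
	double value;
};

static void rec_save(FILE *f, struct rec *r)
{
	fwrite((char *) r + offsetof(struct rec, depth),
	       sizeof(struct rec) - offsetof(struct rec, depth), 1, f);
}

static void rec_load(FILE *f, struct rec *r)
{
	r->next = NULL;		/* links are reconstructed by the caller */
	fread((char *) r + offsetof(struct rec, depth),
	      sizeof(struct rec) - offsetof(struct rec, depth), 1, f);
}
#endif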
void
tree_save(struct tree *tree, struct board *b, int thres)
{
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "wb");
	if (!f) {
		perror("fopen");
		return;
	}
	tree_node_save(f, tree->root, thres);
	fclose(f);
}
static void
tree_node_load(FILE *f, struct tree_node *node, int *num)
{
	(*num)++;

	fread(((void *) node) + offsetof(struct tree_node, depth),
	      sizeof(struct tree_node) - offsetof(struct tree_node, depth),
	      1, f);

	/* Keep values in a sane scale, otherwise we start overflowing. */
#define MAX_PLAYOUTS	10000000
	if (node->u.playouts > MAX_PLAYOUTS) {
		node->u.playouts = MAX_PLAYOUTS;
	}
	if (node->amaf.playouts > MAX_PLAYOUTS) {
		node->amaf.playouts = MAX_PLAYOUTS;
	}
	memcpy(&node->pamaf, &node->amaf, sizeof(node->amaf));
	memcpy(&node->pu, &node->u, sizeof(node->u));

	struct tree_node *ni = NULL, *ni_prev = NULL;
	while (fgetc(f)) {
		ni_prev = ni; ni = calloc(1, sizeof(*ni));
		if (!node->children)
			node->children = ni;
		else
			ni_prev->sibling = ni;
		ni->parent = node;
		tree_node_load(f, ni, num);
	}
}
void
tree_load(struct tree *tree, struct board *b)
{
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "rb");
	if (!f)
		return;

	fprintf(stderr, "Loading opening book %s...\n", filename);

	int num = 0;
	if (fgetc(f))
		tree_node_load(f, tree->root, &num);
	fprintf(stderr, "Loaded %d nodes.\n", num);

	fclose(f);
}
/* Deep-copy the subtree rooted at node. Only for fast_alloc=false,
 * the nodes are malloc()ed individually. */
static struct tree_node *
tree_node_copy(struct tree_node *node)
{
	struct tree_node *n2 = malloc(sizeof(*n2));
	*n2 = *node;
	if (!node->children)
		return n2;
	struct tree_node *ni = node->children;
	struct tree_node *ni2 = tree_node_copy(ni);
	n2->children = ni2; ni2->parent = n2;
	while ((ni = ni->sibling)) {
		ni2->sibling = tree_node_copy(ni);
		ni2 = ni2->sibling; ni2->parent = n2;
	}
	return n2;
}
/* Copy the whole tree. Only for fast_alloc=false. */
struct tree *
tree_copy(struct tree *tree)
{
	assert(!tree->nodes);
	struct tree *t2 = malloc(sizeof(*t2));
	*t2 = *tree;
	t2->root = tree_node_copy(tree->root);
	return t2;
}
/* Copy the subtree rooted at node: all nodes at or below depth
 * or with at least threshold playouts. Only for fast_alloc.
 * The code is destructive on src. The relative order of children of
 * a given node is preserved (assumed by tree_get_node in particular).
 * Returns the copy of node in the destination tree, or NULL
 * if we could not copy it. */
static struct tree_node *
tree_prune(struct tree *dest, struct tree *src, struct tree_node *node,
	   int threshold, int depth)
{
	assert(dest->nodes && node);
	struct tree_node *n2 = tree_fast_alloc_node(dest);
	if (!n2)
		return NULL;
	*n2 = *node;
	if (n2->depth > dest->max_depth)
		dest->max_depth = n2->depth;
	n2->parent = NULL; n2->sibling = NULL; n2->children = NULL;
	n2->is_expanded = false;

	if (node->depth >= depth && node->u.playouts < threshold)
		return n2;
	/* For deep nodes with many playouts, we must copy all children,
	 * even those with zero playouts, because partially expanded
	 * nodes are not supported. Considering them as fully expanded
	 * would degrade the playing strength. The only exception is
	 * when dest becomes full, but this should never happen in practice
	 * if threshold is chosen to limit the number of nodes traversed. */
	struct tree_node *ni = node->children;
	if (!ni)
		return n2;
	struct tree_node **prev2 = &(n2->children);
	while (ni) {
		struct tree_node *ni2 = tree_prune(dest, src, ni, threshold, depth);
		if (!ni2) break;
		*prev2 = ni2; ni2->parent = n2;
		prev2 = &(ni2->sibling);
		ni = ni->sibling;
	}
	if (!ni) {
		n2->is_expanded = true;
	} else {
		n2->children = NULL; // avoid partially expanded nodes
	}
	return n2;
}
/* The following constants are used for garbage collection of nodes.
 * A tree is considered large if the top node has >= 40K playouts.
 * For such trees, we copy deep nodes only if they have enough
 * playouts, with a gradually increasing threshold up to 40.
 * These constants define how much time we're willing to spend
 * scanning the source tree when promoting a move. The chosen values
 * keep worst-case pruning at about 3s for 20 GB of RAM, and this
 * matters only for long thinking times (>1M playouts). For fast games the
 * trees don't grow large. For small RAM or fast games we copy the
 * entire tree. These values do not degrade playing strength and are
 * necessary to avoid losing on time; increasing DEEP_PLAYOUTS_THRESHOLD
 * or decreasing LARGE_TREE_PLAYOUTS would make the program faster but
 * weaker. */
#define LARGE_TREE_PLAYOUTS 40000LL
#define DEEP_PLAYOUTS_THRESHOLD 40

/* Garbage collect the tree early if the top node has < 5K playouts,
 * to avoid having to do it later on a large subtree.
 * This guarantees garbage collection in < 1s. */
#define SMALL_TREE_PLAYOUTS 5000
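/* Worked example (illustrative): the pruning threshold computed in
 * tree_garbage_collect() below,
 *   threshold = (playouts - LARGE_TREE_PLAYOUTS) * DEEP_PLAYOUTS_THRESHOLD / LARGE_TREE_PLAYOUTS
 * clamped to [0, DEEP_PLAYOUTS_THRESHOLD], gives 0 for a 40K-playout root
 * (copy everything), 20 at 60K playouts, and saturates at 40 from 80K
 * playouts up; deep nodes with fewer playouts than that are dropped. */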
/* Free all the tree, keeping only the subtree rooted at node.
 * Prune the subtree if necessary to fit in max_size bytes or
 * to save time scanning the tree.
 * Returns the moved node. Only for fast_alloc. */
struct tree_node *
tree_garbage_collect(struct tree *tree, unsigned long max_size, struct tree_node *node)
{
	assert(tree->nodes && !node->parent && !node->sibling);
	double start_time = time_now();

	struct tree *temp_tree = tree_init(tree->board, tree->root_color, max_size, tree->ltree_aging);
	temp_tree->nodes_size = 0; // We do not want the dummy pass node
	struct tree_node *temp_node;

	/* Find the maximum depth at which we can copy all nodes. */
	int max_nodes = 1;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		max_nodes++;
	unsigned long nodes_size = max_nodes * sizeof(*node);
	int max_depth = node->depth;
	while (nodes_size < max_size && max_nodes > 1) {
		max_depth++;
		nodes_size += max_nodes * nodes_size;
	}

	/* Copy all nodes for small trees. For large trees, copy all nodes
	 * with depth <= max_depth, and all nodes with enough playouts.
	 * Avoiding going too deep (except for nodes with many playouts) is mostly
	 * to save time scanning the source tree. It can take over 20s to traverse
	 * completely a large source tree (20 GB) even without copying because
	 * the traversal is not friendly at all with the memory cache. */
	int threshold = (node->u.playouts - LARGE_TREE_PLAYOUTS) * DEEP_PLAYOUTS_THRESHOLD / LARGE_TREE_PLAYOUTS;
	if (threshold < 0) threshold = 0;
	if (threshold > DEEP_PLAYOUTS_THRESHOLD) threshold = DEEP_PLAYOUTS_THRESHOLD;
	temp_node = tree_prune(temp_tree, tree, node, threshold, max_depth);
	assert(temp_node);

	/* Now copy back to the original tree. */
	tree->nodes_size = 0;
	tree->max_depth = 0;
	struct tree_node *new_node = tree_prune(tree, temp_tree, temp_node, 0, temp_tree->max_depth);
	assert(new_node);

	if (DEBUGL(1)) {
		double now = time_now();
		static double prev_time;
		if (!prev_time) prev_time = start_time;
		fprintf(stderr,
			"tree pruned in %0.6g s, prev %0.3g s ago, dest depth %d wanted %d,"
			" max_size %lu, pruned size %lu, playouts %d\n",
			now - start_time, start_time - prev_time, temp_tree->max_depth, max_depth,
			max_size, temp_tree->nodes_size, new_node->u.playouts);
		prev_time = start_time;
	}
	if (temp_tree->nodes_size >= temp_tree->max_tree_size) {
		fprintf(stderr, "temp tree overflow, increase max_tree_size %lu or MIN_FREE_MEM_PERCENT %llu\n",
			tree->max_tree_size, MIN_FREE_MEM_PERCENT);
	} else {
		assert(tree->nodes_size == temp_tree->nodes_size);
		assert(tree->max_depth == temp_tree->max_depth);
	}
	tree_done(temp_tree);
	return new_node;
}
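/* Sketch (hypothetical, not part of the build): the depth estimate above
 * assumes each extra level multiplies the copied size by roughly the root's
 * branching factor. A standalone version of the same computation: */
#if 0
static int
estimate_full_copy_depth(int root_depth, int branching,
			 unsigned long node_size, unsigned long max_size)
{
	unsigned long size = (unsigned long) branching * node_size;
	int depth = root_depth;
	while (size < max_size && branching > 1) {
		depth++;
		size += branching * size;	/* next level: ~branching times more nodes */
	}
	return depth;
}
#endif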
/* Merge node src into node dest; the child lists of both nodes are
 * coord-sorted. */
static void
tree_node_merge(struct tree_node *dest, struct tree_node *src)
{
	/* Do not merge nodes that weren't touched at all. */
	assert(dest->pamaf.playouts == src->pamaf.playouts);
	assert(dest->pu.playouts == src->pu.playouts);
	if (src->amaf.playouts - src->pamaf.playouts == 0
	    && src->u.playouts - src->pu.playouts == 0) {
		return;
	}

	dest->hints |= src->hints;

	/* Merge the children, both are coord-sorted lists. */
	struct tree_node *di = dest->children, **dref = &dest->children;
	struct tree_node *si = src->children, **sref = &src->children;
	while (di && si) {
		if (di->coord != si->coord) {
			/* src has some extra items or misses di */
			struct tree_node *si2 = si->sibling;
			while (si2 && di->coord != si2->coord) {
				si2 = si2->sibling;
			}
			if (!si2)
				goto next_di; /* src misses di, move on */
			/* chain the extra [si,si2) items before di */
			*dref = si;
			while (si->sibling != si2) {
				si->parent = dest;
				si = si->sibling;
			}
			si->parent = dest;
			si->sibling = di;
			si = si2;
			*sref = si;
		}
		/* Matching nodes - recurse... */
		tree_node_merge(di, si);
		/* ...and move on. */
		sref = &si->sibling; si = si->sibling;
next_di:
		dref = &di->sibling; di = di->sibling;
	}
	if (si) {
		/* Some outstanding nodes are left on the src side, rechain
		 * them to dest. */
		*dref = si;
		while (si) {
			si->parent = dest;
			si = si->sibling;
		}
		*sref = NULL;
	}

	/* Priors should be constant. */
	assert(dest->prior.playouts == src->prior.playouts && dest->prior.value == src->prior.value);

	stats_merge(&dest->amaf, &src->amaf);
	stats_merge(&dest->u, &src->u);
}
/* Merge two trees built upon the same board. Note that the operation is
 * destructive on src. */
void
tree_merge(struct tree *dest, struct tree *src)
{
	if (src->max_depth > dest->max_depth)
		dest->max_depth = src->max_depth;
	tree_node_merge(dest->root, src->root);
}
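/* Sketch (hypothetical, not part of the build): tree_node_merge() above walks
 * two coord-sorted sibling lists in lockstep, recursing on matching entries
 * and splicing src-only entries into dest. The same walk on plain sorted
 * integer lists looks like this: */
#if 0
struct item { int key; struct item *next; };

static void
merge_sorted(struct item **dest, struct item *src)
{
	struct item **dp = dest;
	while (src) {
		struct item *next = src->next;
		while (*dp && (*dp)->key < src->key)
			dp = &(*dp)->next;
		if (*dp && (*dp)->key == src->key) {
			/* Match: a real merge would combine the statistics here. */
		} else {
			src->next = *dp;	/* splice the src-only item into dest */
			*dp = src;
		}
		src = next;
	}
}
#endif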
static void
tree_node_normalize(struct tree_node *node, int factor)
{
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		tree_node_normalize(ni, factor);

#define normalize(s1, s2, t) node->s2.t = node->s1.t + (node->s2.t - node->s1.t) / factor;
	normalize(pamaf, amaf, playouts);
	memcpy(&node->pamaf, &node->amaf, sizeof(node->amaf));

	normalize(pu, u, playouts);
	memcpy(&node->pu, &node->u, sizeof(node->u));
}
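/* Worked example (illustrative): the normalize() macro divides only the delta
 * accumulated since the last backup. With pu.playouts = 100, u.playouts = 300
 * and factor = 2, u.playouts becomes 100 + (300 - 100) / 2 = 200, and the
 * memcpy() then refreshes the pu backup to 200 for the next run. */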
/* Normalize a tree, dividing the amaf and u values by the given
 * factor; otherwise, simulations run in independent threads would
 * be weighted too heavily when merging two trees built upon the same
 * board. To correctly handle results taken from the previous
 * simulation run, they are backed up in pamaf and pu. */
void
tree_normalize(struct tree *tree, int factor)
{
	tree_node_normalize(tree->root, factor);
}
/* Get a node of given coordinate from within parent, possibly creating it
 * if necessary - in a very raw form (no .d, priors, ...). */
/* FIXME: Adjust for board symmetry. */
struct tree_node *
tree_get_node(struct tree *t, struct tree_node *parent, coord_t c, bool create)
{
	if (!parent->children || parent->children->coord >= c) {
		/* Special case: Insertion at the beginning. */
		if (parent->children && parent->children->coord == c)
			return parent->children;
		if (!create)
			return NULL;

		struct tree_node *nn = tree_init_node(t, c, parent->depth + 1, false);
		nn->parent = parent; nn->sibling = parent->children;
		parent->children = nn;
		return nn;
	}

	/* No candidate at the beginning, look through all the children. */

	struct tree_node *ni;
	for (ni = parent->children; ni->sibling; ni = ni->sibling)
		if (ni->sibling->coord >= c)
			break;

	if (ni->sibling && ni->sibling->coord == c)
		return ni->sibling;
	assert(ni->coord < c);
	if (!create)
		return NULL;

	struct tree_node *nn = tree_init_node(t, c, parent->depth + 1, false);
	nn->parent = parent; nn->sibling = ni->sibling; ni->sibling = nn;
	return nn;
}
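/* Hypothetical usage sketch (not part of the build): the children of a node
 * stay coord-sorted, so a lookup and a lazy insert look the same to the
 * caller: */
#if 0
static struct tree_node *
example_get_or_make_child(struct tree *t, struct tree_node *parent, coord_t c)
{
	/* Plain lookup - returns NULL if the child does not exist yet. */
	struct tree_node *n = tree_get_node(t, parent, c, false);
	if (!n) {
		/* Create it in raw form (no priors, no CFG distance). */
		n = tree_get_node(t, parent, c, true);
	}
	assert(n && n->coord == c && n->parent == parent);
	return n;
}
#endif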
/* Get local tree node corresponding to given node, given local node child
 * iterator @lni (which points either at the corresponding node, or at the
 * nearest local tree node after @ni). */
struct tree_node *
tree_lnode_for_node(struct tree *tree, struct tree_node *ni, struct tree_node *lni, int tenuki_d)
{
	/* Now set up lnode, which is the actual local node
	 * corresponding to ni - either lni if it is an
	 * exact match and ni is not tenuki, <pass> local
	 * node if ni is tenuki, or NULL if there is no
	 * corresponding node available. */

	if (is_pass(ni->coord)) {
		/* Also, for sanity reasons we never use local
		 * tree for passes. (Maybe we could, but it's
		 * too hard to think about.) */
		return NULL;
	}

	if (lni->coord == ni->coord) {
		/* We don't consider tenuki a sequence play
		 * that we have in local tree even though
		 * ni->d is too high; this can happen if this
		 * occurred in a different board topology. */
		return lni;
	}

	if (ni->d >= tenuki_d) {
		/* Tenuki, pick a pass lsibling if available. */
		assert(lni->parent && lni->parent->children);
		if (is_pass(lni->parent->children->coord)) {
			return lni->parent->children;
		} else {
			return NULL;
		}
	}

	/* No corresponding local node, lnode stays NULL. */
	return NULL;
}
/* Tree symmetry: When possible, we will localize the tree to a single part
 * of the board in tree_expand_node() and possibly flip along symmetry axes
 * to another part of the board in tree_promote_at(). We follow b->symmetry
 * guidelines here. */
/* This function must be thread safe, given that board b is only modified by the calling thread. */
void
tree_expand_node(struct tree *t, struct tree_node *node, struct board *b, enum stone color, struct uct *u, int parity)
{
	/* Get a Common Fate Graph distance map from the parent node. */
	int distances[board_size2(b)];
	if (!is_pass(b->last_move.coord) && !is_resign(b->last_move.coord)) {
		cfg_distances(b, node->coord, distances, TREE_NODE_D_MAX);
	} else {
		// Pass or resign - everything is too far.
		foreach_point(b) { distances[c] = TREE_NODE_D_MAX + 1; } foreach_point_end;
	}

	/* Get a map of prior values to initialize the new nodes with. */
	struct prior_map map = {
		.b = b,
		.to_play = color,
		.parity = tree_parity(t, parity),
		.distances = distances,
	};
	// Include pass in the prior map.
	struct move_stats map_prior[board_size2(b) + 1]; map.prior = &map_prior[1];
	bool map_consider[board_size2(b) + 1]; map.consider = &map_consider[1];
	memset(map_prior, 0, sizeof(map_prior));
	memset(map_consider, 0, sizeof(map_consider));
	map.consider[pass] = true;
	foreach_point(b) {
		if (board_at(b, c) != S_NONE)
			continue;
		if (!board_is_valid_play(b, color, c))
			continue;
		map.consider[c] = true;
	} foreach_point_end;
	uct_prior(u, node, &map);

	/* Now, create the nodes. */
	struct tree_node *ni = tree_init_node(t, pass, node->depth + 1, t->nodes);
	/* In fast_alloc mode we might temporarily run out of nodes but
	 * this should be rare if MIN_FREE_MEM_PERCENT is set correctly. */
	if (!ni) {
		node->is_expanded = false;
		return;
	}
	struct tree_node *first_child = ni;
	ni->parent = node;
	ni->prior = map.prior[pass]; ni->d = TREE_NODE_D_MAX + 1;

	/* The loop considers only the symmetry playground. */
	if (DEBUGL(7))
		fprintf(stderr, "expanding %s within [%d,%d],[%d,%d] %d-%d\n",
			coord2sstr(node->coord, b),
			b->symmetry.x1, b->symmetry.y1,
			b->symmetry.x2, b->symmetry.y2,
			b->symmetry.type, b->symmetry.d);
	for (int j = b->symmetry.y1; j <= b->symmetry.y2; j++) {
		for (int i = b->symmetry.x1; i <= b->symmetry.x2; i++) {
			if (b->symmetry.d) {
				int x = b->symmetry.type == SYM_DIAG_DOWN ? board_size(b) - 1 - i : i;
				if (x > j) {
					if (DEBUGL(7))
						fprintf(stderr, "drop %d,%d\n", i, j);
					continue;
				}
			}

			coord_t c = coord_xy_otf(i, j, t->board);
			if (!map.consider[c]) // Filter out invalid moves
				continue;
			assert(c != node->coord); // I have spotted "C3 C3" in some sequence...

			struct tree_node *nj = tree_init_node(t, c, node->depth + 1, t->nodes);
			if (!nj) {
				node->is_expanded = false;
				return;
			}
			nj->parent = node; ni->sibling = nj; ni = nj;

			ni->prior = map.prior[c];
			ni->d = distances[c];
		}
	}
	node->children = first_child; // must be done at the end to avoid race
}
static coord_t
flip_coord(struct board *b, coord_t c,
	   bool flip_horiz, bool flip_vert, int flip_diag)
{
	int x = coord_x(c, b), y = coord_y(c, b);
	if (flip_diag) {
		int z = x; x = y; y = z;
	}
	if (flip_horiz) {
		x = board_size(b) - 1 - x;
	}
	if (flip_vert) {
		y = board_size(b) - 1 - y;
	}
	return coord_xy_otf(x, y, b);
}
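/* Illustrative note (assuming Pachi's usual board layout with a one-point
 * border, where playable x coordinates run 1..board_size(b)-2): the mirror
 * board_size(b) - 1 - x maps 1 <-> board_size(b) - 2. On 19x19 board_size()
 * is 21, so a horizontal flip sends x = 3 to 20 - 3 = 17; a diagonal flip
 * simply swaps x and y before the mirrors are applied. */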
static void
tree_fix_node_symmetry(struct board *b, struct tree_node *node,
		       bool flip_horiz, bool flip_vert, int flip_diag)
{
	if (!is_pass(node->coord))
		node->coord = flip_coord(b, node->coord, flip_horiz, flip_vert, flip_diag);

	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		tree_fix_node_symmetry(b, ni, flip_horiz, flip_vert, flip_diag);
}
static void
tree_fix_symmetry(struct tree *tree, struct board *b, coord_t c)
{
	if (is_pass(c))
		return;

	struct board_symmetry *s = &tree->root_symmetry;
	int cx = coord_x(c, b), cy = coord_y(c, b);

	/* playground X->h->v->d normalization */
	bool flip_horiz = cx < s->x1 || cx > s->x2;
	bool flip_vert = cy < s->y1 || cy > s->y2;

	int flip_diag = 0;
	if (s->d) {
		bool dir = (s->type == SYM_DIAG_DOWN);
		int x = dir ^ flip_horiz ^ flip_vert ? board_size(b) - 1 - cx : cx;
		if (flip_vert ? x < cy : x > cy) {
			flip_diag = 1;
		}
	}

	if (DEBUGL(4)) {
		fprintf(stderr, "%s [%d,%d -> %d,%d;%d,%d] will flip %d %d %d -> %s, sym %d (%d) -> %d (%d)\n",
			coord2sstr(c, b),
			cx, cy, s->x1, s->y1, s->x2, s->y2,
			flip_horiz, flip_vert, flip_diag,
			coord2sstr(flip_coord(b, c, flip_horiz, flip_vert, flip_diag), b),
			s->type, s->d, b->symmetry.type, b->symmetry.d);
	}
	if (flip_horiz || flip_vert || flip_diag)
		tree_fix_node_symmetry(b, tree->root, flip_horiz, flip_vert, flip_diag);
}
static void
tree_unlink_node(struct tree_node *node)
{
	struct tree_node *ni = node->parent;
	if (ni->children == node) {
		ni->children = node->sibling;
	} else {
		ni = ni->children;
		while (ni->sibling != node)
			ni = ni->sibling;
		ni->sibling = node->sibling;
	}
	node->sibling = NULL;
	node->parent = NULL;
}
/* Reduce weight of statistics on promotion. Remove nodes that
 * get reduced to zero playouts; returns the next node to consider
 * in the children list (@node may get deleted). */
static struct tree_node *
tree_age_node(struct tree *tree, struct tree_node *node)
{
	node->u.playouts /= tree->ltree_aging;
	if (node->parent && !node->u.playouts) {
		struct tree_node *sibling = node->sibling;
		/* Delete node, no playouts. */
		tree_unlink_node(node);
		tree_done_node(tree, node);
		return sibling;
	}

	struct tree_node *ni = node->children;
	while (ni) ni = tree_age_node(tree, ni);
	return node->sibling;
}
/* Promotes the given node as the root of the tree. In the fast_alloc
 * mode, the node may be moved and some of its subtree may be pruned. */
void
tree_promote_node(struct tree *tree, struct tree_node **node)
{
	assert((*node)->parent == tree->root);
	tree_unlink_node(*node);
	if (!tree->nodes) {
		/* Freeing the rest of the tree can take several seconds on large
		 * trees, so we must do it asynchronously: */
		tree_done_node_detached(tree, tree->root);
	} else {
		/* Garbage collect if we run out of memory, or it is cheap to do so now: */
		unsigned long min_free_size = (MIN_FREE_MEM_PERCENT * tree->max_tree_size) / 100;
		if (tree->nodes_size >= tree->max_tree_size - min_free_size
		    || (tree->nodes_size >= min_free_size && (*node)->u.playouts < SMALL_TREE_PLAYOUTS))
			*node = tree_garbage_collect(tree, min_free_size, *node);
	}
	tree->root = *node;
	tree->root_color = stone_other(tree->root_color);

	board_symmetry_update(tree->board, &tree->root_symmetry, (*node)->coord);
	/* See the tree.score description for an explanation of why we don't zero
	 * the score on node promotion. */
	// tree->score.playouts = 0;

	/* If the tree's deepest node was under node, or if we called tree_garbage_collect,
	 * tree->max_depth is correct. Otherwise we could traverse the tree
	 * to recompute max_depth, but it's not worth it: it's just for debugging
	 * and soon the tree will grow and max_depth will become correct again. */

	if (tree->ltree_aging != 1.0f) { // XXX: != should work here even with the float
		tree_age_node(tree, tree->ltree_black);
		tree_age_node(tree, tree->ltree_white);
	}
}
/* Find the node of the given coordinate under the root and promote it;
 * returns false if the move is not present in the tree. */
bool
tree_promote_at(struct tree *tree, struct board *b, coord_t c)
{
	tree_fix_symmetry(tree, b, c);

	for (struct tree_node *ni = tree->root->children; ni; ni = ni->sibling) {
		if (ni->coord == c) {
			tree_promote_node(tree, &ni);
			return true;
		}
	}
	return false;
}