17 #include "uct/internal.h"
18 #include "uct/prior.h"
22 /* Allocate one node in the fast_alloc mode. The returned node
23 * is _not_ initialized. Returns NULL if not enough memory.
24 * This function may be called by multiple threads in parallel. */
25 static struct tree_node
*
26 tree_fast_alloc_node(struct tree
*t
)
28 assert(t
->nodes
!= NULL
);
29 struct tree_node
*n
= NULL
;
30 unsigned long old_size
=__sync_fetch_and_add(&t
->nodes_size
, sizeof(*n
));
32 /* The test below works even if max_tree_size is not a
33 * multiple of the node size because tree_init() allocates
34 * space for an extra node. */
35 if (old_size
< t
->max_tree_size
)
36 n
= (struct tree_node
*)(t
->nodes
+ old_size
);
40 /* Allocate and initialize a node. Returns NULL (fast_alloc mode)
41 * or exits the main program if not enough memory.
42 * This function may be called by multiple threads in parallel. */
43 static struct tree_node
*
44 tree_init_node(struct tree
*t
, coord_t coord
, int depth
, bool fast_alloc
)
48 n
= tree_fast_alloc_node(t
);
50 memset(n
, 0, sizeof(*n
));
52 n
= calloc(1, sizeof(*n
));
54 fprintf(stderr
, "tree_init_node(): OUT OF MEMORY\n");
57 __sync_fetch_and_add(&t
->nodes_size
, sizeof(*n
));
61 volatile static long c
= 1000000;
62 n
->hash
= __sync_fetch_and_add(&c
, 1);
63 if (depth
> t
->max_depth
)
68 /* Create a tree structure. Pre-allocate all nodes if max_tree_size is > 0. */
70 tree_init(struct board
*board
, enum stone color
, unsigned long max_tree_size
, float ltree_aging
)
72 struct tree
*t
= calloc(1, sizeof(*t
));
74 t
->max_tree_size
= max_tree_size
;
75 if (max_tree_size
!= 0) {
76 /* Allocate one extra node, max_tree_size may not be multiple of node size. */
77 t
->nodes
= malloc(max_tree_size
+ sizeof(struct tree_node
));
78 /* The nodes buffer doesn't need initialization. This is currently
79 * done by tree_init_node to spread the load. Doing a memset for the
80 * entire buffer here would be too slow for large trees (>10 GB). */
82 fprintf(stderr
, "tree_init(): OUT OF MEMORY\n");
86 /* The root PASS move is only virtual, we never play it. */
87 t
->root
= tree_init_node(t
, pass
, 0, t
->nodes
);
88 t
->root_symmetry
= board
->symmetry
;
89 t
->root_color
= stone_other(color
); // to research black moves, root will be white
91 t
->ltree_black
= tree_init_node(t
, pass
, 0, false);
92 t
->ltree_white
= tree_init_node(t
, pass
, 0, false);
93 t
->ltree_aging
= ltree_aging
;
98 /* This function may be called by multiple threads in parallel on the
99 * same tree, but not on node n. n may be detached from the tree but
100 * must have been created in this tree originally.
101 * It returns the remaining size of the tree after n has been freed. */
103 tree_done_node(struct tree
*t
, struct tree_node
*n
)
105 struct tree_node
*ni
= n
->children
;
107 struct tree_node
*nj
= ni
->sibling
;
108 tree_done_node(t
, ni
);
112 unsigned long old_size
= __sync_fetch_and_sub(&t
->nodes_size
, sizeof(*n
));
113 return old_size
- sizeof(*n
);
121 /* Worker thread for tree_done_node_detached(). Only for fast_alloc=false. */
123 tree_done_node_worker(void *ctx_
)
125 struct subtree_ctx
*ctx
= ctx_
;
126 char *str
= coord2str(ctx
->n
->coord
, ctx
->t
->board
);
128 unsigned long tree_size
= tree_done_node(ctx
->t
, ctx
->n
);
132 fprintf(stderr
, "done freeing node at %s, tree size %lu\n", str
, tree_size
);
138 /* Asynchronously free the subtree of nodes rooted at n. If the tree becomes
139 * empty free the tree also. Only for fast_alloc=false. */
141 tree_done_node_detached(struct tree
*t
, struct tree_node
*n
)
143 if (n
->u
.playouts
< 1000) { // no thread for small tree
144 if (!tree_done_node(t
, n
))
149 pthread_attr_init(&attr
);
150 pthread_attr_setdetachstate(&attr
, PTHREAD_CREATE_DETACHED
);
153 struct subtree_ctx
*ctx
= malloc(sizeof(struct subtree_ctx
));
155 fprintf(stderr
, "tree_done_node_detached(): OUT OF MEMORY\n");
160 pthread_create(&thread
, &attr
, tree_done_node_worker
, ctx
);
161 pthread_attr_destroy(&attr
);
165 tree_done(struct tree
*t
)
167 tree_done_node(t
, t
->ltree_black
);
168 tree_done_node(t
, t
->ltree_white
);
172 } else if (!tree_done_node(t
, t
->root
)) {
174 /* A tree_done_node_worker might still be running on this tree but
175 * it will free the tree later. It is also freeing nodes faster than
176 * we will create new ones. */
182 tree_node_dump(struct tree
*tree
, struct tree_node
*node
, int l
, int thres
)
184 for (int i
= 0; i
< l
; i
++) fputc(' ', stderr
);
186 for (struct tree_node
*ni
= node
->children
; ni
; ni
= ni
->sibling
)
188 /* We use 1 as parity, since for all nodes we want to know the
189 * win probability of _us_, not the node color. */
190 fprintf(stderr
, "[%s] %f %% %d [prior %f %% %d amaf %f %% %d]; hints %x; %d children <%"PRIhash
">\n",
191 coord2sstr(node
->coord
, tree
->board
),
192 tree_node_get_value(tree
, 1, node
->u
.value
), node
->u
.playouts
,
193 tree_node_get_value(tree
, 1, node
->prior
.value
), node
->prior
.playouts
,
194 tree_node_get_value(tree
, 1, node
->amaf
.value
), node
->amaf
.playouts
,
195 node
->hints
, children
, node
->hash
);
197 /* Print nodes sorted by #playouts. */
199 struct tree_node
*nbox
[1000]; int nboxl
= 0;
200 for (struct tree_node
*ni
= node
->children
; ni
; ni
= ni
->sibling
)
201 if (ni
->u
.playouts
> thres
)
206 for (int i
= 0; i
< nboxl
; i
++)
207 if (nbox
[i
] && (best
< 0 || nbox
[i
]->u
.playouts
> nbox
[best
]->u
.playouts
))
211 tree_node_dump(tree
, nbox
[best
], l
+ 1, /* node->u.value < 0.1 ? 0 : */ thres
);
217 tree_dump(struct tree
*tree
, int thres
)
219 if (thres
&& tree
->root
->u
.playouts
/ thres
> 100) {
220 /* Be a bit sensible about this; the opening book can create
221 * huge dumps at first. */
222 thres
= tree
->root
->u
.playouts
/ 100 * (thres
< 1000 ? 1 : thres
/ 1000);
224 fprintf(stderr
, "(UCT tree; root %s; extra komi %f)\n",
225 stone2str(tree
->root_color
), tree
->extra_komi
);
226 tree_node_dump(tree
, tree
->root
, 0, thres
);
228 if (DEBUGL(3) && tree
->ltree_black
) {
229 fprintf(stderr
, "B local tree:\n");
230 tree_node_dump(tree
, tree
->ltree_black
, 0, thres
);
231 fprintf(stderr
, "W local tree:\n");
232 tree_node_dump(tree
, tree
->ltree_white
, 0, thres
);
238 tree_book_name(struct board
*b
)
240 static char buf
[256];
241 if (b
->handicap
> 0) {
242 sprintf(buf
, "uctbook-%d-%02.01f-h%d.pachitree", b
->size
- 2, b
->komi
, b
->handicap
);
244 sprintf(buf
, "uctbook-%d-%02.01f.pachitree", b
->size
- 2, b
->komi
);
250 tree_node_save(FILE *f
, struct tree_node
*node
, int thres
)
252 bool save_children
= node
->u
.playouts
>= thres
;
255 node
->is_expanded
= 0;
258 fwrite(((void *) node
) + offsetof(struct tree_node
, depth
),
259 sizeof(struct tree_node
) - offsetof(struct tree_node
, depth
),
263 for (struct tree_node
*ni
= node
->children
; ni
; ni
= ni
->sibling
)
264 tree_node_save(f
, ni
, thres
);
267 node
->is_expanded
= 1;
274 tree_save(struct tree
*tree
, struct board
*b
, int thres
)
276 char *filename
= tree_book_name(b
);
277 FILE *f
= fopen(filename
, "wb");
282 tree_node_save(f
, tree
->root
, thres
);
289 tree_node_load(FILE *f
, struct tree_node
*node
, int *num
)
293 fread(((void *) node
) + offsetof(struct tree_node
, depth
),
294 sizeof(struct tree_node
) - offsetof(struct tree_node
, depth
),
297 /* Keep values in sane scale, otherwise we start overflowing. */
298 #define MAX_PLAYOUTS 10000000
299 if (node
->u
.playouts
> MAX_PLAYOUTS
) {
300 node
->u
.playouts
= MAX_PLAYOUTS
;
302 if (node
->amaf
.playouts
> MAX_PLAYOUTS
) {
303 node
->amaf
.playouts
= MAX_PLAYOUTS
;
305 memcpy(&node
->pamaf
, &node
->amaf
, sizeof(node
->amaf
));
306 memcpy(&node
->pu
, &node
->u
, sizeof(node
->u
));
308 struct tree_node
*ni
= NULL
, *ni_prev
= NULL
;
310 ni_prev
= ni
; ni
= calloc(1, sizeof(*ni
));
314 ni_prev
->sibling
= ni
;
316 tree_node_load(f
, ni
, num
);
321 tree_load(struct tree
*tree
, struct board
*b
)
323 char *filename
= tree_book_name(b
);
324 FILE *f
= fopen(filename
, "rb");
328 fprintf(stderr
, "Loading opening book %s...\n", filename
);
332 tree_node_load(f
, tree
->root
, &num
);
333 fprintf(stderr
, "Loaded %d nodes.\n", num
);
339 static struct tree_node
*
340 tree_node_copy(struct tree_node
*node
)
342 struct tree_node
*n2
= malloc(sizeof(*n2
));
346 struct tree_node
*ni
= node
->children
;
347 struct tree_node
*ni2
= tree_node_copy(ni
);
348 n2
->children
= ni2
; ni2
->parent
= n2
;
349 while ((ni
= ni
->sibling
)) {
350 ni2
->sibling
= tree_node_copy(ni
);
351 ni2
= ni2
->sibling
; ni2
->parent
= n2
;
357 tree_copy(struct tree
*tree
)
359 assert(!tree
->nodes
);
360 struct tree
*t2
= malloc(sizeof(*t2
));
362 t2
->root
= tree_node_copy(tree
->root
);
366 /* Copy the subtree rooted at node: all nodes at or below depth
367 * or with at least threshold playouts. Only for fast_alloc.
368 * The code is destructive on src. The relative order of children of
369 * a given node is preserved (assumed by tree_get_node in particular).
370 * Returns the copy of node in the destination tree, or NULL
371 * if we could not copy it. */
372 static struct tree_node
*
373 tree_prune(struct tree
*dest
, struct tree
*src
, struct tree_node
*node
,
374 int threshold
, int depth
)
376 assert(dest
->nodes
&& node
);
377 struct tree_node
*n2
= tree_fast_alloc_node(dest
);
381 if (n2
->depth
> dest
->max_depth
)
382 dest
->max_depth
= n2
->depth
;
384 n2
->is_expanded
= false;
386 if (node
->depth
>= depth
&& node
->u
.playouts
< threshold
)
388 /* For deep nodes with many playouts, we must copy all children,
389 * even those with zero playouts, because partially expanded
390 * nodes are not supported. Considering them as fully expanded
391 * would degrade the playing strength. The only exception is
392 * when dest becomes full, but this should never happen in practice
393 * if threshold is chosen to limit the number of nodes traversed. */
394 struct tree_node
*ni
= node
->children
;
395 struct tree_node
**prev2
= &(n2
->children
);
397 struct tree_node
*ni2
= tree_prune(dest
, src
, ni
, threshold
, depth
);
400 prev2
= &(ni2
->sibling
);
405 n2
->is_expanded
= true;
407 n2
->children
= NULL
; // avoid partially expanded nodes
412 /* The following constants are used for garbage collection of nodes.
413 * A tree is considered large if the top node has >= 40K playouts.
414 * For such trees, we copy deep nodes only if they have >= 40 playouts.
415 * These constants define how much time we're willing to spend
416 * scanning the source tree when promoting a move. The values 40K
417 * and 40 makes worst case pruning in about 3s for 20 GB ram, and this
418 * is only for long thinking time (>1M playouts). For fast games the
419 * trees don't grow large. For small ram or fast game we copy the
420 * entire tree. These values do not degrade playing strength and are
421 * necessary to avoid losing on time; increasing MIN_DEEP_PLAYOUTS
422 or decreasing LARGE_TREE_PLAYOUTS will make the program faster but
weaker (TODO: recover the exact tail of this sentence from upstream). */
424 #define LARGE_TREE_PLAYOUTS 40000
425 #define MIN_DEEP_PLAYOUTS 40
427 /* Free all the tree, keeping only the subtree rooted at node.
428 * Prune the subtree if necessary to fit in max_size bytes or
429 * to save time scanning the tree.
430 * Returns the moved node. Only for fast_alloc. */
431 static struct tree_node
*
432 tree_garbage_collect(struct tree
*tree
, unsigned long max_size
, struct tree_node
*node
)
434 assert(tree
->nodes
&& !node
->parent
&& !node
->sibling
);
435 double start_time
= time_now();
437 struct tree
*temp_tree
= tree_init(tree
->board
, tree
->root_color
, max_size
, tree
->ltree_aging
);
438 temp_tree
->nodes_size
= 0; // We do not want the dummy pass node
439 struct tree_node
*temp_node
;
441 /* Find the maximum depth at which we can copy all nodes. */
443 for (struct tree_node
*ni
= node
->children
; ni
; ni
= ni
->sibling
)
445 unsigned long nodes_size
= max_nodes
* sizeof(*node
);
446 int max_depth
= node
->depth
;
447 while (nodes_size
< max_size
&& max_nodes
> 1) {
449 nodes_size
+= max_nodes
* nodes_size
;
453 /* Copy all nodes for small trees. For large trees, copy all nodes
454 * with depth <= max_depth, and all nodes with at least MIN_DEEP_PLAYOUTS.
455 * Avoiding going too deep (except for nodes with many playouts) is mostly
456 * to save time scanning the source tree. It can take over 20s to traverse
457 * completely a large source tree (20 GB) even without copying because
458 * the traversal is not friendly at all with the memory cache. */
459 if (node
->u
.playouts
< LARGE_TREE_PLAYOUTS
) {
460 temp_node
= tree_prune(temp_tree
, tree
, node
, 0, max_depth
+ 20);
462 temp_node
= tree_prune(temp_tree
, tree
, node
, MIN_DEEP_PLAYOUTS
, max_depth
);
466 /* Now copy back to original tree. */
467 tree
->nodes_size
= 0;
469 struct tree_node
*new_node
= tree_prune(tree
, temp_tree
, temp_node
, 0, temp_tree
->max_depth
);
472 double now
= time_now();
473 static double prev_time
;
474 if (!prev_time
) prev_time
= start_time
;
476 "tree pruned in %0.6g s, prev %0.3g s ago, dest depth %d wanted %d,"
477 " max_size %lu, pruned size %lu, playouts %d\n",
478 now
- start_time
, start_time
- prev_time
, temp_tree
->max_depth
, max_depth
,
479 max_size
, temp_tree
->nodes_size
, new_node
->u
.playouts
);
480 prev_time
= start_time
;
482 if (temp_tree
->nodes_size
>= temp_tree
->max_tree_size
) {
483 fprintf(stderr
, "temp tree overflow, increase max_tree_size %lu or MIN_FREE_MEM_PERCENT %d\n",
484 tree
->max_tree_size
, MIN_FREE_MEM_PERCENT
);
486 assert(tree
->nodes_size
== temp_tree
->nodes_size
);
487 assert(tree
->max_depth
== temp_tree
->max_depth
);
489 tree_done(temp_tree
);
495 tree_node_merge(struct tree_node
*dest
, struct tree_node
*src
)
497 /* Do not merge nodes that weren't touched at all. */
498 assert(dest
->pamaf
.playouts
== src
->pamaf
.playouts
);
499 assert(dest
->pu
.playouts
== src
->pu
.playouts
);
500 if (src
->amaf
.playouts
- src
->pamaf
.playouts
== 0
501 && src
->u
.playouts
- src
->pu
.playouts
== 0) {
505 dest
->hints
|= src
->hints
;
507 /* Merge the children, both are coord-sorted lists. */
508 struct tree_node
*di
= dest
->children
, **dref
= &dest
->children
;
509 struct tree_node
*si
= src
->children
, **sref
= &src
->children
;
511 if (di
->coord
!= si
->coord
) {
512 /* src has some extra items or misses di */
513 struct tree_node
*si2
= si
->sibling
;
514 while (si2
&& di
->coord
!= si2
->coord
) {
518 goto next_di
; /* src misses di, move on */
519 /* chain the extra [si,si2) items before di */
521 while (si
->sibling
!= si2
) {
530 /* Matching nodes - recurse... */
531 tree_node_merge(di
, si
);
532 /* ...and move on. */
533 sref
= &si
->sibling
; si
= si
->sibling
;
535 dref
= &di
->sibling
; di
= di
->sibling
;
538 /* Some outstanding nodes are left on src side, rechain
548 /* Priors should be constant. */
549 assert(dest
->prior
.playouts
== src
->prior
.playouts
&& dest
->prior
.value
== src
->prior
.value
);
551 stats_merge(&dest
->amaf
, &src
->amaf
);
552 stats_merge(&dest
->u
, &src
->u
);
555 /* Merge two trees built upon the same board. Note that the operation is
556 * destructive on src. */
558 tree_merge(struct tree
*dest
, struct tree
*src
)
560 if (src
->max_depth
> dest
->max_depth
)
561 dest
->max_depth
= src
->max_depth
;
562 tree_node_merge(dest
->root
, src
->root
);
567 tree_node_normalize(struct tree_node
*node
, int factor
)
569 for (struct tree_node
*ni
= node
->children
; ni
; ni
= ni
->sibling
)
570 tree_node_normalize(ni
, factor
);
572 #define normalize(s1, s2, t) node->s2.t = node->s1.t + (node->s2.t - node->s1.t) / factor;
573 normalize(pamaf
, amaf
, playouts
);
574 memcpy(&node
->pamaf
, &node
->amaf
, sizeof(node
->amaf
));
576 normalize(pu
, u
, playouts
);
577 memcpy(&node
->pu
, &node
->u
, sizeof(node
->u
));
581 /* Normalize a tree, dividing the amaf and u values by given
582 * factor; otherwise, simulations run in independent threads
583 * two trees built upon the same board. To correctly handle
584 * results taken from previous simulation run, they are backed
587 tree_normalize(struct tree
*tree
, int factor
)
589 tree_node_normalize(tree
->root
, factor
);
593 /* Get a node of given coordinate from within parent, possibly creating it
594 * if necessary - in a very raw form (no .d, priors, ...). */
595 /* FIXME: Adjust for board symmetry. */
597 tree_get_node(struct tree
*t
, struct tree_node
*parent
, coord_t c
, bool create
)
599 if (!parent
->children
|| parent
->children
->coord
>= c
) {
600 /* Special case: Insertion at the beginning. */
601 if (parent
->children
&& parent
->children
->coord
== c
)
602 return parent
->children
;
606 struct tree_node
*nn
= tree_init_node(t
, c
, parent
->depth
+ 1, false);
607 nn
->parent
= parent
; nn
->sibling
= parent
->children
;
608 parent
->children
= nn
;
612 /* No candidate at the beginning, look through all the children. */
614 struct tree_node
*ni
;
615 for (ni
= parent
->children
; ni
->sibling
; ni
= ni
->sibling
)
616 if (ni
->sibling
->coord
>= c
)
619 if (ni
->sibling
&& ni
->sibling
->coord
== c
)
621 assert(ni
->coord
< c
);
625 struct tree_node
*nn
= tree_init_node(t
, c
, parent
->depth
+ 1, false);
626 nn
->parent
= parent
; nn
->sibling
= ni
->sibling
; ni
->sibling
= nn
;
630 /* Get local tree node corresponding to given node, given local node child
631 * iterator @lni (which points either at the corresponding node, or at the
632 * nearest local tree node after @ni). */
634 tree_lnode_for_node(struct tree
*tree
, struct tree_node
*ni
, struct tree_node
*lni
, int tenuki_d
)
636 /* Now set up lnode, which is the actual local node
637 * corresponding to ni - either lni if it is an
638 * exact match and ni is not tenuki, <pass> local
639 * node if ni is tenuki, or NULL if there is no
640 * corresponding node available. */
642 if (is_pass(ni
->coord
)) {
643 /* Also, for sanity reasons we never use local
644 * tree for passes. (Maybe we could, but it's
645 * too hard to think about.) */
649 if (lni
->coord
== ni
->coord
) {
650 /* We don't consider tenuki a sequence play
651 * that we have in local tree even though
652 * ni->d is too high; this can happen if this
653 * occured in different board topology. */
657 if (ni
->d
>= tenuki_d
) {
658 /* Tenuki, pick a pass lsibling if available. */
659 assert(lni
->parent
&& lni
->parent
->children
);
660 if (is_pass(lni
->parent
->children
->coord
)) {
661 return lni
->parent
->children
;
667 /* No corresponding local node, lnode stays NULL. */
672 /* Tree symmetry: When possible, we will localize the tree to a single part
673 * of the board in tree_expand_node() and possibly flip along symmetry axes
674 * to another part of the board in tree_promote_at(). We follow b->symmetry
675 * guidelines here. */
678 /* This function must be thread safe, given that board b is only modified by the calling thread. */
680 tree_expand_node(struct tree
*t
, struct tree_node
*node
, struct board
*b
, enum stone color
, struct uct
*u
, int parity
)
682 /* Get a Common Fate Graph distance map from parent node. */
683 int distances
[board_size2(b
)];
684 if (!is_pass(b
->last_move
.coord
) && !is_resign(b
->last_move
.coord
)) {
685 cfg_distances(b
, node
->coord
, distances
, TREE_NODE_D_MAX
);
687 // Pass or resign - everything is too far.
688 foreach_point(b
) { distances
[c
] = TREE_NODE_D_MAX
+ 1; } foreach_point_end
;
691 /* Get a map of prior values to initialize the new nodes with. */
692 struct prior_map map
= {
695 .parity
= tree_parity(t
, parity
),
696 .distances
= distances
,
698 // Include pass in the prior map.
699 struct move_stats map_prior
[board_size2(b
) + 1]; map
.prior
= &map_prior
[1];
700 bool map_consider
[board_size2(b
) + 1]; map
.consider
= &map_consider
[1];
701 memset(map_prior
, 0, sizeof(map_prior
));
702 memset(map_consider
, 0, sizeof(map_consider
));
703 struct move pm
= { .color
= color
};
704 map
.consider
[pass
] = true;
706 if (board_at(b
, c
) != S_NONE
)
709 if (!board_is_valid_move(b
, &pm
))
711 map
.consider
[c
] = true;
713 uct_prior(u
, node
, &map
);
715 /* Now, create the nodes. */
716 struct tree_node
*ni
= tree_init_node(t
, pass
, node
->depth
+ 1, t
->nodes
);
717 /* In fast_alloc mode we might temporarily run out of nodes but
718 * this should be rare if MIN_FREE_MEM_PERCENT is set correctly. */
720 node
->is_expanded
= false;
723 struct tree_node
*first_child
= ni
;
725 ni
->prior
= map
.prior
[pass
]; ni
->d
= TREE_NODE_D_MAX
+ 1;
727 /* The loop considers only the symmetry playground. */
729 fprintf(stderr
, "expanding %s within [%d,%d],[%d,%d] %d-%d\n",
730 coord2sstr(node
->coord
, b
),
731 b
->symmetry
.x1
, b
->symmetry
.y1
,
732 b
->symmetry
.x2
, b
->symmetry
.y2
,
733 b
->symmetry
.type
, b
->symmetry
.d
);
735 for (int j
= b
->symmetry
.y1
; j
<= b
->symmetry
.y2
; j
++) {
736 for (int i
= b
->symmetry
.x1
; i
<= b
->symmetry
.x2
; i
++) {
738 int x
= b
->symmetry
.type
== SYM_DIAG_DOWN
? board_size(b
) - 1 - i
: i
;
741 fprintf(stderr
, "drop %d,%d\n", i
, j
);
746 coord_t c
= coord_xy_otf(i
, j
, t
->board
);
747 if (!map
.consider
[c
]) // Filter out invalid moves
749 assert(c
!= node
->coord
); // I have spotted "C3 C3" in some sequence...
751 struct tree_node
*nj
= tree_init_node(t
, c
, node
->depth
+ 1, t
->nodes
);
753 node
->is_expanded
= false;
756 nj
->parent
= node
; ni
->sibling
= nj
; ni
= nj
;
758 ni
->prior
= map
.prior
[c
];
759 ni
->d
= distances
[c
];
762 node
->children
= first_child
; // must be done at the end to avoid race
767 flip_coord(struct board
*b
, coord_t c
,
768 bool flip_horiz
, bool flip_vert
, int flip_diag
)
770 int x
= coord_x(c
, b
), y
= coord_y(c
, b
);
772 int z
= x
; x
= y
; y
= z
;
775 x
= board_size(b
) - 1 - x
;
778 y
= board_size(b
) - 1 - y
;
780 return coord_xy_otf(x
, y
, b
);
784 tree_fix_node_symmetry(struct board
*b
, struct tree_node
*node
,
785 bool flip_horiz
, bool flip_vert
, int flip_diag
)
787 if (!is_pass(node
->coord
))
788 node
->coord
= flip_coord(b
, node
->coord
, flip_horiz
, flip_vert
, flip_diag
);
790 for (struct tree_node
*ni
= node
->children
; ni
; ni
= ni
->sibling
)
791 tree_fix_node_symmetry(b
, ni
, flip_horiz
, flip_vert
, flip_diag
);
795 tree_fix_symmetry(struct tree
*tree
, struct board
*b
, coord_t c
)
800 struct board_symmetry
*s
= &tree
->root_symmetry
;
801 int cx
= coord_x(c
, b
), cy
= coord_y(c
, b
);
803 /* playground X->h->v->d normalization
809 bool flip_horiz
= cx
< s
->x1
|| cx
> s
->x2
;
810 bool flip_vert
= cy
< s
->y1
|| cy
> s
->y2
;
814 bool dir
= (s
->type
== SYM_DIAG_DOWN
);
815 int x
= dir
^ flip_horiz
^ flip_vert
? board_size(b
) - 1 - cx
: cx
;
816 if (flip_vert
? x
< cy
: x
> cy
) {
822 fprintf(stderr
, "%s [%d,%d -> %d,%d;%d,%d] will flip %d %d %d -> %s, sym %d (%d) -> %d (%d)\n",
824 cx
, cy
, s
->x1
, s
->y1
, s
->x2
, s
->y2
,
825 flip_horiz
, flip_vert
, flip_diag
,
826 coord2sstr(flip_coord(b
, c
, flip_horiz
, flip_vert
, flip_diag
), b
),
827 s
->type
, s
->d
, b
->symmetry
.type
, b
->symmetry
.d
);
829 if (flip_horiz
|| flip_vert
|| flip_diag
)
830 tree_fix_node_symmetry(b
, tree
->root
, flip_horiz
, flip_vert
, flip_diag
);
835 tree_unlink_node(struct tree_node
*node
)
837 struct tree_node
*ni
= node
->parent
;
838 if (ni
->children
== node
) {
839 ni
->children
= node
->sibling
;
842 while (ni
->sibling
!= node
)
844 ni
->sibling
= node
->sibling
;
846 node
->sibling
= NULL
;
850 /* Reduce weight of statistics on promotion. Remove nodes that
851 * get reduced to zero playouts; returns next node to consider
852 * in the children list (@node may get deleted). */
853 static struct tree_node
*
854 tree_age_node(struct tree
*tree
, struct tree_node
*node
)
856 node
->u
.playouts
/= tree
->ltree_aging
;
857 if (node
->parent
&& !node
->u
.playouts
) {
858 struct tree_node
*sibling
= node
->sibling
;
859 /* Delete node, no playouts. */
860 tree_unlink_node(node
);
861 tree_done_node(tree
, node
);
865 struct tree_node
*ni
= node
->children
;
866 while (ni
) ni
= tree_age_node(tree
, ni
);
867 return node
->sibling
;
870 /* Promotes the given node as the root of the tree. In the fast_alloc
871 * mode, the node may be moved and some of its subtree may be pruned. */
873 tree_promote_node(struct tree
*tree
, struct tree_node
**node
)
875 assert((*node
)->parent
== tree
->root
);
876 tree_unlink_node(*node
);
878 /* Freeing the rest of the tree can take several seconds on large
879 * trees, so we must do it asynchronously: */
880 tree_done_node_detached(tree
, tree
->root
);
882 unsigned long min_free_size
= (MIN_FREE_MEM_PERCENT
* tree
->max_tree_size
) / 100;
883 if (tree
->nodes_size
>= tree
->max_tree_size
- min_free_size
)
884 *node
= tree_garbage_collect(tree
, min_free_size
, *node
);
885 /* If we still have enough free memory, we will free everything later. */
888 tree
->root_color
= stone_other(tree
->root_color
);
889 board_symmetry_update(tree
->board
, &tree
->root_symmetry
, (*node
)->coord
);
891 /* If the tree deepest node was under node, or if we called tree_garbage_collect,
892 * tree->max_depth is correct. Otherwise we could traverse the tree
893 * to recompute max_depth but it's not worth it: it's just for debugging
894 * and soon the tree will grow and max_depth will become correct again. */
896 if (tree
->ltree_aging
!= 1.0f
) { // XXX: != should work here even with the float
897 tree_age_node(tree
, tree
->ltree_black
);
898 tree_age_node(tree
, tree
->ltree_white
);
903 tree_promote_at(struct tree
*tree
, struct board
*b
, coord_t c
)
905 tree_fix_symmetry(tree
, b
, c
);
907 for (struct tree_node
*ni
= tree
->root
->children
; ni
; ni
= ni
->sibling
) {
908 if (ni
->coord
== c
) {
909 tree_promote_node(tree
, &ni
);