/* Generic routines for manipulating PHIs
   Copyright (C) 2003-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-ssa.h"
/* Rewriting a function into SSA form can create a huge number of PHIs,
   many of which may be thrown away shortly after their creation if jumps
   were threaded through PHI nodes.

   While our garbage collection mechanisms will handle this situation, it
   is extremely wasteful to create nodes and throw them away, especially
   when the nodes can be reused.

   For PR 8361, we can significantly reduce the number of nodes allocated
   and thus the total amount of memory allocated by managing PHIs a
   little.  This additionally helps reduce the amount of work done by the
   garbage collector.  Similar results have been seen on a wider variety
   of tests (such as the compiler itself).

   PHI nodes have different sizes, so we can't have a single list of all
   the PHI nodes as it would be too expensive to walk down that list to
   find a PHI of a suitable size.

   Instead we have an array of lists of free PHI nodes.  The array is
   indexed by the number of PHI alternatives that PHI node can hold.
   Except for the last array member, which holds all remaining PHI
   nodes.

   So to find a free PHI node, we compute its index into the free PHI
   node array and see if there are any elements with an exact match.
   If so, then we are done.  Otherwise, we test the next larger size
   up and continue until we are in the last array element.

   We do not actually walk members of the last array element.  While it
   might allow us to pick up a few reusable PHI nodes, it could potentially
   be very expensive if the program has released a bunch of large PHI nodes,
   but keeps asking for even larger PHI nodes.  Experiments have shown that
   walking the elements of the last array entry would result in finding less
   than .1% additional reusable PHI nodes.

   Note that we can never have less than two PHI argument slots.  Thus,
   the -2 on all the calculations below.  */
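
/* For instance, with NUM_BUCKETS == 10 the free list array below has eight
   entries.  A request for a PHI with four argument slots starts its search
   at bucket 4 - 2 == 2 and walks towards the larger buckets; only if no
   suitably sized node is found does allocate_phi_node fall back to a fresh
   GGC allocation.  */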
#define NUM_BUCKETS 10
static GTY ((deletable (""))) vec<gimple *, va_gc> *free_phinodes[NUM_BUCKETS - 2];
static unsigned long free_phinode_count;
static int ideal_phi_node_len (int);

unsigned int phi_nodes_reused;
unsigned int phi_nodes_created;
/* Dump some simple statistics regarding the re-use of PHI nodes.  */

void
phinodes_print_statistics (void)
{
  fprintf (stderr, "PHI nodes allocated: %u\n", phi_nodes_created);
  fprintf (stderr, "PHI nodes reused: %u\n", phi_nodes_reused);
}
/* Allocate a PHI node with at least LEN arguments.  If the free list
   happens to contain a PHI node with LEN arguments or more, return
   that one.  */

static inline gphi *
allocate_phi_node (size_t len)
{
  gphi *phi;
  size_t bucket = NUM_BUCKETS - 2;
  size_t size = sizeof (struct gphi)
                + (len - 1) * sizeof (struct phi_arg_d);

  if (free_phinode_count)
    for (bucket = len - 2; bucket < NUM_BUCKETS - 2; bucket++)
      if (free_phinodes[bucket])
        break;

  /* If our free list has an element, then use it.  */
  if (bucket < NUM_BUCKETS - 2
      && gimple_phi_capacity ((*free_phinodes[bucket])[0]) >= len)
    {
      free_phinode_count--;
      phi = as_a <gphi *> (free_phinodes[bucket]->pop ());
      if (free_phinodes[bucket]->is_empty ())
        vec_free (free_phinodes[bucket]);
      if (GATHER_STATISTICS)
        phi_nodes_reused++;
    }
  else
    {
      phi = static_cast <gphi *> (ggc_internal_alloc (size));
      if (GATHER_STATISTICS)
        {
          enum gimple_alloc_kind kind = gimple_alloc_kind (GIMPLE_PHI);

          phi_nodes_created++;
          gimple_alloc_counts[(int) kind]++;
          gimple_alloc_sizes[(int) kind] += size;
        }
    }

  return phi;
}
/* Given LEN, the original number of requested PHI arguments, return
   a new, "ideal" length for the PHI node.  The "ideal" length rounds
   the total size of the PHI node up to the next power of two bytes.

   Rounding up will not result in wasting any memory since the size request
   will be rounded up by the GC system anyway.  [ Note this is not entirely
   true since the original length might have fit on one of the special
   GC pages. ]  By rounding up, we may avoid the need to reallocate the
   PHI node later if we increase the number of arguments for the PHI.  */
static int
ideal_phi_node_len (int len)
{
  size_t size, new_size;
  int log2, new_len;

  /* We do not support allocations of less than two PHI argument slots.  */
  if (len < 2)
    len = 2;

  /* Compute the number of bytes of the original request.  */
  size = sizeof (struct gphi)
         + (len - 1) * sizeof (struct phi_arg_d);

  /* Round it up to the next power of two.  */
  log2 = ceil_log2 (size);
  new_size = 1 << log2;

  /* Now compute and return the number of PHI argument slots given an
     ideal size allocation.  */
  new_len = len + (new_size - size) / sizeof (struct phi_arg_d);

  return new_len;
}
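
/* As a purely illustrative example (the real structure sizes depend on the
   host and target): if sizeof (struct gphi) were 80 bytes and
   sizeof (struct phi_arg_d) were 24 bytes, a request for LEN == 2 would
   need 104 bytes, which rounds up to a new_size of 128; the 24 spare bytes
   buy one extra argument slot, so the function would return 3.  */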
/* Return a PHI node with LEN argument slots for variable VAR.  */

static gphi *
make_phi_node (tree var, int len)
{
  gphi *phi;
  int capacity, i;

  capacity = ideal_phi_node_len (len);

  phi = allocate_phi_node (capacity);

  /* We need to clear the entire PHI node, including the argument
     portion, because we represent a "missing PHI argument" by placing
     NULL_TREE in PHI_ARG_DEF.  */
  memset (phi, 0, (sizeof (struct gphi)
                   - sizeof (struct phi_arg_d)
                   + sizeof (struct phi_arg_d) * len));
  phi->code = GIMPLE_PHI;
  gimple_init_singleton (phi);
  phi->nargs = len;
  phi->capacity = capacity;
  if (!var)
    ;
  else if (TREE_CODE (var) == SSA_NAME)
    gimple_phi_set_result (phi, var);
  else
    gimple_phi_set_result (phi, make_ssa_name (var, phi));

  for (i = 0; i < capacity; i++)
    {
      use_operand_p imm;

      gimple_phi_arg_set_location (phi, i, UNKNOWN_LOCATION);
      imm = gimple_phi_arg_imm_use_ptr (phi, i);
      imm->use = gimple_phi_arg_def_ptr (phi, i);
      imm->prev = NULL;
      imm->next = NULL;
      imm->loc.stmt = phi;
    }

  return phi;
}
/* We no longer need PHI, release it so that it may be reused.  */

void
release_phi_node (gimple *phi)
{
  size_t bucket;
  size_t len = gimple_phi_capacity (phi);
  size_t x;

  for (x = 0; x < gimple_phi_num_args (phi); x++)
    {
      use_operand_p imm;
      imm = gimple_phi_arg_imm_use_ptr (phi, x);
      delink_imm_use (imm);
    }

  bucket = len > NUM_BUCKETS - 1 ? NUM_BUCKETS - 1 : len;
  bucket -= 2;
  vec_safe_push (free_phinodes[bucket], phi);
  free_phinode_count++;
}
/* Resize an existing PHI node.  The only way is up.  Return the
   possibly relocated phi.  */

static gphi *
resize_phi_node (gphi *phi, size_t len)
{
  size_t old_size, i;
  gphi *new_phi;

  gcc_assert (len > gimple_phi_capacity (phi));

  /* The garbage collector will not look at the PHI node beyond the
     first PHI_NUM_ARGS elements.  Therefore, all we have to copy is a
     portion of the PHI node currently in use.  */
  old_size = sizeof (struct gphi)
             + (gimple_phi_num_args (phi) - 1) * sizeof (struct phi_arg_d);

  new_phi = allocate_phi_node (len);

  memcpy (new_phi, phi, old_size);

  for (i = 0; i < gimple_phi_num_args (new_phi); i++)
    {
      use_operand_p imm, old_imm;
      imm = gimple_phi_arg_imm_use_ptr (new_phi, i);
      old_imm = gimple_phi_arg_imm_use_ptr (phi, i);
      imm->use = gimple_phi_arg_def_ptr (new_phi, i);
      relink_imm_use_stmt (imm, old_imm, new_phi);
    }

  new_phi->capacity = len;

  for (i = gimple_phi_num_args (new_phi); i < len; i++)
    {
      use_operand_p imm;

      gimple_phi_arg_set_location (new_phi, i, UNKNOWN_LOCATION);
      imm = gimple_phi_arg_imm_use_ptr (new_phi, i);
      imm->use = gimple_phi_arg_def_ptr (new_phi, i);
      imm->prev = NULL;
      imm->next = NULL;
      imm->loc.stmt = new_phi;
    }

  return new_phi;
}
/* Reserve PHI arguments for a new edge to basic block BB.  */

void
reserve_phi_args_for_new_edge (basic_block bb)
{
  size_t len = EDGE_COUNT (bb->preds);
  size_t cap = ideal_phi_node_len (len + 4);
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *stmt = gsi.phi ();

      if (len > gimple_phi_capacity (stmt))
        {
          gphi *new_phi = resize_phi_node (stmt, cap);

          /* The result of the PHI is defined by this PHI node.  */
          SSA_NAME_DEF_STMT (gimple_phi_result (new_phi)) = new_phi;
          gsi_set_stmt (&gsi, new_phi);

          release_phi_node (stmt);
          stmt = new_phi;
        }

      stmt->nargs++;

      /* We represent a "missing PHI argument" by placing NULL_TREE in
         the corresponding slot.  If PHI arguments were added
         immediately after an edge is created, this zeroing would not
         be necessary, but unfortunately this is not the case.  For
         example, the loop optimizer duplicates several basic blocks,
         redirects edges, and then fixes up PHI arguments later in
         batch.  */
      SET_PHI_ARG_DEF (stmt, len - 1, NULL_TREE);
      gimple_phi_arg_set_location (stmt, len - 1, UNKNOWN_LOCATION);
    }
}
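
/* Note that this reservation is what lets add_phi_arg below merely assert
   that enough argument slots already exist: PHI argument vectors are grown
   here, when the edge is created, and the actual argument values are filled
   in later via add_phi_arg.  */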
/* Adds PHI to BB.  */

void
add_phi_node_to_bb (gphi *phi, basic_block bb)
{
  gimple_seq seq = phi_nodes (bb);
  /* Add the new PHI node to the list of PHI nodes for block BB.  */
  if (seq == NULL)
    set_phi_nodes (bb, gimple_seq_alloc_with_stmt (phi));
  else
    {
      gimple_seq_add_stmt (&seq, phi);
      gcc_assert (seq == phi_nodes (bb));
    }

  /* Associate BB to the PHI node.  */
  gimple_set_bb (phi, bb);
}
/* Create a new PHI node for variable VAR at basic block BB.  */

gphi *
create_phi_node (tree var, basic_block bb)
{
  gphi *phi = make_phi_node (var, EDGE_COUNT (bb->preds));

  add_phi_node_to_bb (phi, bb);
  return phi;
}
/* Add a new argument to PHI node PHI.  DEF is the incoming reaching
   definition and E is the edge through which DEF reaches PHI.  The new
   argument is added at the end of the argument list.
   If PHI has reached its maximum capacity, add a few slots.  In this case,
   PHI points to the reallocated phi node when we return.  */

void
add_phi_arg (gphi *phi, tree def, edge e, source_location locus)
{
  basic_block bb = e->dest;

  gcc_assert (bb == gimple_bb (phi));

  /* We resize PHI nodes upon edge creation.  We should always have
     enough room at this point.  */
  gcc_assert (gimple_phi_num_args (phi) <= gimple_phi_capacity (phi));

  /* We resize PHI nodes upon edge creation.  We should always have
     enough room at this point.  */
  gcc_assert (e->dest_idx < gimple_phi_num_args (phi));

  /* Copy propagation needs to know what objects occur in abnormal
     PHI nodes.  This is a convenient place to record such information.  */
  if (e->flags & EDGE_ABNORMAL)
    {
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def) = 1;
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
    }

  SET_PHI_ARG_DEF (phi, e->dest_idx, def);
  gimple_phi_arg_set_location (phi, e->dest_idx, locus);
}
/* Remove the Ith argument from PHI's argument list.  This routine
   implements removal by swapping the last alternative with the
   alternative we want to delete and then shrinking the vector, which
   is consistent with how we remove an edge from the edge vector.  */

static void
remove_phi_arg_num (gphi *phi, int i)
{
  int num_elem = gimple_phi_num_args (phi);

  gcc_assert (i < num_elem);

  /* Delink the item which is being removed.  */
  delink_imm_use (gimple_phi_arg_imm_use_ptr (phi, i));

  /* If it is not the last element, move the last element
     to the element we want to delete, resetting all the links.  */
  if (i != num_elem - 1)
    {
      use_operand_p old_p, new_p;
      old_p = gimple_phi_arg_imm_use_ptr (phi, num_elem - 1);
      new_p = gimple_phi_arg_imm_use_ptr (phi, i);
      /* Set use on new node, and link into last element's place.  */
      *(new_p->use) = *(old_p->use);
      relink_imm_use (new_p, old_p);
      /* Move the location as well.  */
      gimple_phi_arg_set_location (phi, i,
                                   gimple_phi_arg_location (phi, num_elem - 1));
    }

  /* Shrink the vector and return.  Note that we do not have to clear
     PHI_ARG_DEF because the garbage collector will not look at those
     elements beyond the first PHI_NUM_ARGS elements of the array.  */
  phi->nargs--;
}
/* Remove all PHI arguments associated with edge E.  */

void
remove_phi_args (edge e)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    remove_phi_arg_num (gsi.phi (), e->dest_idx);
}
/* Remove the PHI node pointed-to by iterator GSI from basic block BB.  After
   removal, iterator GSI is updated to point to the next PHI node in the
   sequence.  If RELEASE_LHS_P is true, the LHS of this PHI node is released
   into the free pool of SSA names.  */

void
remove_phi_node (gimple_stmt_iterator *gsi, bool release_lhs_p)
{
  gimple *phi = gsi_stmt (*gsi);

  if (release_lhs_p)
    insert_debug_temps_for_defs (gsi);

  gsi_remove (gsi, false);

  /* If we are deleting the PHI node, then we should release the
     SSA_NAME node so that it can be reused.  */
  release_phi_node (phi);
  if (release_lhs_p)
    release_ssa_name (gimple_phi_result (phi));
}
/* Remove all the phi nodes from BB.  */

void
remove_phi_nodes (basic_block bb)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
    remove_phi_node (&gsi, true);

  set_phi_nodes (bb, NULL);
}
/* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
   NULL.  */

tree
degenerate_phi_result (gphi *phi)
{
  tree lhs = gimple_phi_result (phi);
  tree val = NULL;
  size_t i;

  /* Ignoring arguments which are the same as LHS, if all the remaining
     arguments are the same, then the PHI is a degenerate and has the
     value of that common argument.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (arg == lhs)
        continue;
      else if (!arg)
        break;
      else if (!val)
        val = arg;
      else if (arg == val)
        continue;
      /* We bring in some of operand_equal_p not only to speed things
         up, but also to avoid crashing when dereferencing the type of
         a released SSA name.  */
      else if (TREE_CODE (val) != TREE_CODE (arg)
               || TREE_CODE (val) == SSA_NAME
               || !operand_equal_p (arg, val, 0))
        break;
    }
  return (i == gimple_phi_num_args (phi) ? val : NULL);
}
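
/* For example, x_1 = PHI <y_2(3), y_2(4), x_1(5)> is degenerate: the
   self-referencing x_1 argument is ignored and the remaining arguments
   agree, so the result is y_2.  A PHI whose arguments mix distinct values
   yields NULL.  */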
/* Set PHI nodes of a basic block BB to SEQ.  */

void
set_phi_nodes (basic_block bb, gimple_seq seq)
{
  gimple_stmt_iterator i;

  gcc_checking_assert (!(bb->flags & BB_RTL));
  bb->il.gimple.phi_nodes = seq;
  if (seq)
    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
      gimple_set_bb (gsi_stmt (i), bb);
}
#include "gt-tree-phinodes.h"