/* Const/Copy propagation originating from degenerate PHIs
   Copyright (C) 2001-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "cfghooks.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "ssa.h"
28 #include "fold-const.h"
29 #include "cfgloop.h"
30 #include "gimple-pretty-print.h"
31 #include "gimple-fold.h"
32 #include "tree-eh.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "tree-pass.h"
36 #include "tree-ssa-propagate.h"

/* PHI-ONLY copy and constant propagation.  This pass is meant to clean
   up degenerate PHIs created by or exposed by jump threading.  */
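
/* A degenerate PHI is one whose arguments all carry the same value (or
   are the PHI's own result).  For example (illustrative GIMPLE, not
   taken from any particular testcase):

	x_2 = PHI <y_5 (3), y_5 (4)>

   is equivalent to the copy x_2 = y_5, so y_5 can be propagated into
   every use of x_2 and the PHI removed.  */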

/* Given a statement STMT, which is either a PHI node or an assignment,
   remove it from the IL.  */

static void
remove_stmt_or_phi (gimple *stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);

  if (gimple_code (stmt) == GIMPLE_PHI)
    remove_phi_node (&gsi, true);
  else
    {
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}

/* Given a statement STMT, which is either a PHI node or an assignment,
   return the "rhs" of the node.  In the case of a non-degenerate PHI,
   NULL is returned.  */

static tree
get_rhs_or_phi_arg (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_PHI)
    return degenerate_phi_result (as_a <gphi *> (stmt));
  else if (gimple_assign_single_p (stmt))
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Given a statement STMT, which is either a PHI node or an assignment,
   return the "lhs" of the node.  */

static tree
get_lhs_or_phi_result (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_PHI)
    return gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    return gimple_assign_lhs (stmt);
  else
    gcc_unreachable ();
}

/* Propagate RHS into all uses of LHS (when possible).

   RHS and LHS are derived from STMT, which is passed in solely so
   that we can remove it if propagation is successful.

   When propagating into a PHI node or into a statement which turns
   into a trivial copy or constant initialization, set the
   appropriate bit in INTERESTING_NAMES so that we will visit those
   nodes as well in an effort to pick up secondary optimization
   opportunities.

   NEED_EH_CLEANUP tracks blocks that need their EH information
   cleaned up after changing EH information on a statement.  */
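
/* For instance, if the constant 5 is propagated for c_1 into the copy
   x_2 = c_1, that statement becomes the constant initialization
   x_2 = 5, so x_2 is added to INTERESTING_NAMES and its uses are
   revisited later.  (The names and values here are illustrative
   only.)  */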

static bool
propagate_rhs_into_lhs (gimple *stmt, tree lhs, tree rhs,
                        bitmap interesting_names, bitmap need_eh_cleanup)
{
  bool cfg_altered = false;

  /* First verify that propagation is valid.  */
  if (may_propagate_copy (lhs, rhs))
    {
      use_operand_p use_p;
      imm_use_iterator iter;
      gimple *use_stmt;
      bool all = true;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replacing '");
          print_generic_expr (dump_file, lhs, dump_flags);
          fprintf (dump_file, "' with %s '",
                   (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
          print_generic_expr (dump_file, rhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      /* Walk over every use of LHS and try to replace the use with RHS.
         At this point the only reason why such a propagation would not
         be successful would be if the use occurs in an ASM_EXPR.  */
      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
        {
          /* Leave debug stmts alone.  If we succeed in propagating
             all non-debug uses, we'll drop the DEF, and propagation
             into debug stmts will occur then.  */
          if (gimple_debug_bind_p (use_stmt))
            continue;

          /* It's not always safe to propagate into an ASM_EXPR.  */
          if (gimple_code (use_stmt) == GIMPLE_ASM
              && ! may_propagate_copy_into_asm (lhs))
            {
              all = false;
              continue;
            }

          /* It's not ok to propagate into the definition stmt of RHS.

                <bb 9>:
                  # prephitmp.12_36 = PHI <g_67.1_6(9)>
                  g_67.1_6 = prephitmp.12_36;
                  goto <bb 9>;

             While this is strictly all dead code we do not want to
             deal with this here.  */
          if (TREE_CODE (rhs) == SSA_NAME
              && SSA_NAME_DEF_STMT (rhs) == use_stmt)
            {
              all = false;
              continue;
            }

          /* Dump details.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Original statement:");
              print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
            }

          /* Propagate the RHS into this use of the LHS.  */
          FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
            propagate_value (use_p, rhs);

          /* Special cases to avoid useless calls into the folding
             routines, operand scanning, etc.

             Propagation into a PHI may cause the PHI to become
             a degenerate, so mark the PHI as interesting.  No other
             actions are necessary.  */
          if (gimple_code (use_stmt) == GIMPLE_PHI)
            {
              tree result;

              /* Dump details.  */
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "    Updated statement:");
                  print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
                }

              result = get_lhs_or_phi_result (use_stmt);
              bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
              continue;
            }

          /* From this point onward we are propagating into a
             real statement.  Folding may (or may not) be possible,
             we may expose new operands, expose dead EH edges,
             etc.  */
          /* NOTE tuples.  In the tuples world, fold_stmt_inplace
             cannot fold a call that simplifies to a constant,
             because the GIMPLE_CALL must be replaced by a
             GIMPLE_ASSIGN, and there is no way to effect such a
             transformation in-place.  We might want to consider
             using the more general fold_stmt here.  */
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
              fold_stmt_inplace (&gsi);
            }

          /* Sometimes propagation can expose new operands to the
             renamer.  */
          update_stmt (use_stmt);

          /* Dump details.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Updated statement:");
              print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
            }

          /* If we replaced a variable index with a constant, then
             we would need to update the invariant flag for ADDR_EXPRs.  */
          if (gimple_assign_single_p (use_stmt)
              && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
            recompute_tree_invariant_for_addr_expr
              (gimple_assign_rhs1 (use_stmt));

          /* If we cleaned up EH information from the statement,
             mark its containing block as needing EH cleanups.  */
          if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
            {
              bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Flagged to clear EH edges.\n");
            }

          /* Propagation may expose new trivial copy/constant propagation
             opportunities.  */
          if (gimple_assign_single_p (use_stmt)
              && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
              && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
                  || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
            {
              tree result = get_lhs_or_phi_result (use_stmt);
              bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
            }

          /* Propagation into these nodes may make certain edges in
             the CFG unexecutable.  We want to identify them, as PHI
             nodes at the destination of those unexecutable edges may
             become degenerate.  */
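          /* For example, once a_1 has been replaced by the constant 0,
             a conditional such as "if (a_1 != 0)" becomes "if (0 != 0)";
             find_taken_edge then selects the false edge and every other
             outgoing edge can be removed.  (The names above are
             illustrative only.)  */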
          else if (gimple_code (use_stmt) == GIMPLE_COND
                   || gimple_code (use_stmt) == GIMPLE_SWITCH
                   || gimple_code (use_stmt) == GIMPLE_GOTO)
            {
              tree val;

              if (gimple_code (use_stmt) == GIMPLE_COND)
                val = fold_binary_loc (gimple_location (use_stmt),
                                       gimple_cond_code (use_stmt),
                                       boolean_type_node,
                                       gimple_cond_lhs (use_stmt),
                                       gimple_cond_rhs (use_stmt));
              else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
                val = gimple_switch_index (as_a <gswitch *> (use_stmt));
              else
                val = gimple_goto_dest (use_stmt);

              if (val && is_gimple_min_invariant (val))
                {
                  basic_block bb = gimple_bb (use_stmt);
                  edge te = find_taken_edge (bb, val);
                  if (!te)
                    continue;

                  edge_iterator ei;
                  edge e;
                  gimple_stmt_iterator gsi;
                  gphi_iterator psi;

                  /* Remove all outgoing edges except TE.  */
                  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
                    {
                      if (e != te)
                        {
                          /* Mark all the PHI nodes at the destination of
                             the unexecutable edge as interesting.  */
                          for (psi = gsi_start_phis (e->dest);
                               !gsi_end_p (psi);
                               gsi_next (&psi))
                            {
                              gphi *phi = psi.phi ();

                              tree result = gimple_phi_result (phi);
                              int version = SSA_NAME_VERSION (result);

                              bitmap_set_bit (interesting_names, version);
                            }

                          te->probability += e->probability;

                          te->count += e->count;
                          remove_edge (e);
                          cfg_altered = true;
                        }
                      else
                        ei_next (&ei);
                    }

                  gsi = gsi_last_bb (gimple_bb (use_stmt));
                  gsi_remove (&gsi, true);

                  /* And fixup the flags on the single remaining edge.  */
                  te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
                  te->flags &= ~EDGE_ABNORMAL;
                  te->flags |= EDGE_FALLTHRU;
                  if (te->probability > REG_BR_PROB_BASE)
                    te->probability = REG_BR_PROB_BASE;
                }
            }
        }

      /* Ensure there is nothing else to do.  */
      gcc_assert (!all || has_zero_uses (lhs));

      /* If we were able to propagate away all uses of LHS, then
         we can remove STMT.  */
      if (all)
        remove_stmt_or_phi (stmt);
    }

  return cfg_altered;
}

/* STMT is either a PHI node (potentially a degenerate PHI node) or
   a statement that is a trivial copy or constant initialization.

   Attempt to eliminate STMT by propagating its RHS into all uses of
   its LHS.  This may in turn set new bits in INTERESTING_NAMES
   for nodes we want to revisit later.

   All exit paths should clear INTERESTING_NAMES for the result
   of STMT.

   NEED_EH_CLEANUP tracks blocks that need their EH information
   cleaned up after changing EH information on a statement.  It is
   not set or queried here, but passed along to children.  */

static bool
eliminate_const_or_copy (gimple *stmt, bitmap interesting_names,
                         bitmap need_eh_cleanup)
{
  tree lhs = get_lhs_or_phi_result (stmt);
  tree rhs;
  int version = SSA_NAME_VERSION (lhs);
  bool cfg_altered = false;

  /* If the LHS of this statement or PHI has no uses, then we can
     just eliminate it.  This can occur if, for example, the PHI
     was created by block duplication due to threading and its only
     use was in the conditional at the end of the block which was
     deleted.  */
  if (has_zero_uses (lhs))
    {
      bitmap_clear_bit (interesting_names, version);
      remove_stmt_or_phi (stmt);
      return cfg_altered;
    }

  /* Get the RHS of the assignment or PHI node if the PHI is a
     degenerate.  */
  rhs = get_rhs_or_phi_arg (stmt);
  if (!rhs)
    {
      bitmap_clear_bit (interesting_names, version);
      return cfg_altered;
    }

  if (!virtual_operand_p (lhs))
    cfg_altered = propagate_rhs_into_lhs (stmt, lhs, rhs,
                                          interesting_names, need_eh_cleanup);
  else
    {
      gimple *use_stmt;
      imm_use_iterator iter;
      use_operand_p use_p;
      /* For virtual operands we have to propagate into all uses as
         otherwise we will create overlapping life-ranges.  */
      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
        FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
          SET_USE (use_p, rhs);
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
        SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
      remove_stmt_or_phi (stmt);
    }

  /* Note that STMT may well have been deleted by now, so do
     not access it; instead use the saved version number to clear
     STMT's entry in the worklist.  */
  bitmap_clear_bit (interesting_names, version);
  return cfg_altered;
}

/* The first phase in degenerate PHI elimination.

   Eliminate the degenerate PHIs in BB, then recurse on the
   dominator children of BB.

   INTERESTING_NAMES tracks SSA_NAMEs that we may want to revisit
   in the future.  It is not set or queried here, but passed along
   to children.

   NEED_EH_CLEANUP tracks blocks that need their EH information
   cleaned up after changing EH information on a statement.  It is
   not set or queried here, but passed along to children.  */

static bool
eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names,
                             bitmap need_eh_cleanup)
{
  gphi_iterator gsi;
  basic_block son;
  bool cfg_altered = false;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();

      cfg_altered |= eliminate_const_or_copy (phi, interesting_names,
                                              need_eh_cleanup);
    }

  /* Recurse into the dominator children of BB.  */
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    cfg_altered |= eliminate_degenerate_phis_1 (son, interesting_names,
                                                need_eh_cleanup);

  return cfg_altered;
}

/* A very simple pass to eliminate degenerate PHI nodes from the
   IL.  This is meant to be fast enough to be run several times
   in the optimization pipeline.

   Certain optimizations, particularly those which duplicate blocks
   or remove edges from the CFG, can create or expose PHIs which are
   trivial copies or constant initializations.

   While we could pick up these optimizations in DOM or with the
   combination of copy-prop and CCP, those solutions are far too
   heavy-weight for our needs.

   This implementation has two phases so that we can efficiently
   eliminate the first order degenerate PHIs and second order
   degenerate PHIs.

   The first phase performs a dominator walk to identify and eliminate
   the vast majority of the degenerate PHIs.  When a degenerate PHI
   is identified and eliminated, any affected statements or PHIs
   are put on a worklist.

   The second phase eliminates degenerate PHIs and trivial copies
   or constant initializations using the worklist.  This is how we
   pick up the secondary optimization opportunities with minimal
   cost.  */
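
/* To illustrate a "second order" degeneracy (the example is made up,
   not taken from a real dump): if x_2 = PHI <y_5 (3), y_5 (4)> is
   eliminated by propagating y_5, a later z_7 = PHI <x_2 (6), y_5 (7)>
   turns into z_7 = PHI <y_5 (6), y_5 (7)>, which is itself degenerate
   and is picked up from the worklist in the second phase.  */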

namespace {

const pass_data pass_data_phi_only_cprop =
{
  GIMPLE_PASS, /* type */
  "phicprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PHI_CPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_cleanup_cfg | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_phi_only_cprop : public gimple_opt_pass
{
public:
  pass_phi_only_cprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_phi_only_cprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_phi_only_cprop (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dom != 0; }
  virtual unsigned int execute (function *);

}; // class pass_phi_only_cprop

unsigned int
pass_phi_only_cprop::execute (function *fun)
{
  bitmap interesting_names;
  bitmap interesting_names1;
  bool cfg_altered = false;

  /* Bitmap of blocks which need EH information updated.  We cannot
     update it on-the-fly as doing so invalidates the dominator tree.  */
  bitmap need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* INTERESTING_NAMES is effectively our worklist, indexed by
     SSA_NAME_VERSION.

     A set bit indicates that the statement or PHI node which
     defines the SSA_NAME should be (re)examined to determine if
     it has become a degenerate PHI or trivial const/copy propagation
     opportunity.

     Experiments have shown we generally get better compile-time
     behavior with bitmaps rather than sbitmaps.  */
  interesting_names = BITMAP_ALLOC (NULL);
  interesting_names1 = BITMAP_ALLOC (NULL);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* First phase.  Eliminate degenerate PHIs via a dominator
     walk of the CFG.

     Experiments have indicated that we generally get better
     compile-time behavior by visiting blocks in the first
     phase in dominator order.  Presumably this is because walking
     in dominator order leaves fewer PHIs for later examination
     by the worklist phase.  */
  cfg_altered = eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR_FOR_FN (fun),
                                             interesting_names,
                                             need_eh_cleanup);

  /* Second phase.  Eliminate second order degenerate PHIs as well
     as trivial copies or constant initializations identified by
     the first phase or this phase.  Basically we keep iterating
     until our set of INTERESTING_NAMES is empty.  */
  while (!bitmap_empty_p (interesting_names))
    {
      unsigned int i;
      bitmap_iterator bi;

      /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
         changed during the loop.  Copy it to another bitmap and
         use that.  */
      bitmap_copy (interesting_names1, interesting_names);

      EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
        {
          tree name = ssa_name (i);

          /* Ignore SSA_NAMEs that have been released because
             their defining statement was deleted (unreachable).  */
          if (name)
            cfg_altered
              |= eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
                                          interesting_names, need_eh_cleanup);
        }
    }

  if (cfg_altered)
    {
      free_dominance_info (CDI_DOMINATORS);
      /* If we changed the CFG, schedule loops for fixup by cfgcleanup.  */
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Propagation of constants and copies may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      BITMAP_FREE (need_eh_cleanup);
    }

  BITMAP_FREE (interesting_names);
  BITMAP_FREE (interesting_names1);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_phi_only_cprop (gcc::context *ctxt)
{
  return new pass_phi_only_cprop (ctxt);
}