/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "ggc.h"
31 #include "basic-block.h"
32 #include "cfgloop.h"
33 #include "output.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "diagnostic.h"
37 #include "timevar.h"
38 #include "tree-dump.h"
39 #include "tree-flow.h"
40 #include "domwalk.h"
41 #include "real.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-propagate.h"
44 #include "langhooks.h"
45 #include "params.h"
/* This file implements optimizations on the dominator tree.  */

/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */

enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_CALL
};

struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0; tree opnd1; } binary;
    struct { tree fn; bool pure; size_t nargs; tree *args; } call;
  } ops;
};

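/* As an illustrative sketch (the SSA names are hypothetical, not from
   a real dump), an assignment such as

     x_1 = a_2 + b_3;

   is recorded with kind EXPR_BINARY, ops.binary.op PLUS_EXPR and
   operands a_2 and b_3, while a call such as x_1 = foo (a_2) uses
   EXPR_CALL with fn, nargs and args filled in.  */
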
/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

struct cond_equivalence
{
  struct hashable_expr cond;
  tree value;
};

/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  The number of recorded conditions can vary, but
     can be determined by the condition's code.  So we have an array
     and its maximum index rather than use a varray.  */
  struct cond_equivalence *cond_equivalences;
  unsigned int max_cond_equivalences;
};

/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly, as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;

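/* For instance (an illustrative sketch, not from an actual dump):

     x_1 = a_2 + b_3;
     ...
     y_4 = a_2 + b_3;

   After the first statement is entered into the table, the lookup for
   the second finds a_2 + b_3 already available and the statement can
   be rewritten as y_4 = x_1.  */
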
/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;
DEF_VEC_P(expr_hash_elt_t);
DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);

static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;

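/* A sketch of the unwinding discipline: entering block B pushes a
   NULL marker followed by each expression B makes available; when we
   leave B we pop entries, removing each from AVAIL_EXPRS, until the
   marker is reached.  This scopes the table to the dominator tree
   without rebuilding it for every block.  */
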
/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};

/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;

/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (struct cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_leave_block (struct dom_walk_data *, basic_block);
static void dom_opt_enter_block (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);

/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      expr->type = NULL_TREE;

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn = gimple_call_fn (stmt);

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = (tree *) xcalloc (nargs, sizeof (tree));
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}

/* Compare two hashable_expr structures for equivalence.
   They are considered equivalent when the expressions
   they denote must necessarily be equal.  The logic is intended
   to follow that of operand_equal_p in fold-const.c  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If only one of the types is NULL, the expressions cannot
     be equal.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the types do not have the same signedness, precision, and mode,
     then we cannot consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (! operand_equal_p (expr0->ops.call.fn,
                               expr1->ops.call.fn, 0))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}

/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */

static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;

        val = iterative_hash_object (code, val);
        val = iterative_hash_expr (expr->ops.call.fn, val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}

/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;

        print_generic_expr (stream, element->expr.ops.call.fn, 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, " ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}

/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);

  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);

  free (element);
}

/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}

/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              if (edge_info->cond_equivalences)
                free (edge_info->cond_equivalences);
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}

/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free_expr_hash_elt);
  avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dom_opt_enter_block;
  walk_data.after_dom_children = dom_opt_leave_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (single_succ_p (bb) == 1
              && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            {
              bitmap_clear_bit (need_eh_cleanup, i);
              bitmap_set_bit (need_eh_cleanup, single_succ (bb)->index);
            }
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);

  /* Free the value-handle array.  */
  threadedge_finalize_values ();
  ssa_name_values = NULL;

  return 0;
}

static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}

struct gimple_opt_pass pass_dominator =
{
 {
  GIMPLE_PASS,
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_cleanup_cfg
    | TODO_verify_ssa                   /* todo_flags_finish */
 }
};

/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */

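/* For example (an illustrative sketch), a condition written as

     if (5 < a_1)

   is canonicalized by swapping the operands and comparison code to

     if (a_1 > 5)

   so that later hash table lookups see a single canonical form.  */
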
static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}

/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Remove all the expressions in LOCALS from TABLE, stopping when there are
   LIMIT entries left in LOCALs.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
    {
      expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);
      void **slot;

      if (victim == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, victim);
        }

      slot = htab_find_slot_with_hash (avail_exprs,
                                       victim, victim->hash, NO_INSERT);
      gcc_assert (slot && *slot == (void *) victim);
      htab_clear_slot (avail_exprs, slot);
    }
}

/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = VEC_pop (tree, const_and_copies_stack);
      set_ssa_name_value (dest, prev_value);
    }
}

/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}

/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

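/* A sketch of the payoff: if the equivalences recorded along edge E
   determine the outcome of a conditional such as "if (a_1 == 0)" in
   E's destination, the jump can be threaded past that conditional.
   The dummy GIMPLE_COND built below is scratch storage that the
   threader reuses for each simplification attempt.  */
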
static void
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  if (! walk_data->global_data)
    {
      gimple dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);
      walk_data->global_data = dummy_cond;
    }

  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
}

/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */

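/* For instance (an illustrative sketch, not from a real dump):

     x_3 = PHI <y_1, y_1, x_3>

   Every alternative is either y_1 or the result itself, so x_3 must
   equal y_1 and that equivalence may be recorded.  */
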
static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}

/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}

/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;

          if (lhs)
            record_equality (lhs, rhs);

          if (cond_equivalences)
            for (i = 0; i < edge_info->max_cond_equivalences; i++)
              record_cond (&cond_equivalences[i]);
        }
    }
}

/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements: %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, " avail_exprs: ");
  htab_statistics (file, avail_exprs);
}

/* Dump SSA statistics on stderr.  */

void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}

/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}

/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (struct cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
    }
  else
    free (element);
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           struct cond_equivalence *p)
{
  struct hashable_expr *cond = &p->cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  p->value = boolean_true_node;
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

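/* The array layout used below (a sketch): slots 0 and 1 always hold
   COND == true and INVERTED == false; any remaining slots hold the
   implied conditions, e.g. for integer a < b the array also records
   a <= b == true and a != b == true.  */
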
static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 6;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 6);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[5]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 3;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 3);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[2]);
        }
      else
        {
          edge_info->max_cond_equivalences = 2;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 5;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 5);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNORDERED_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 8);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[5]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[7]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNEQ_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case LTGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    default:
      edge_info->max_cond_equivalences = 2;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &edge_info->cond_equivalences[0].cond);
  edge_info->cond_equivalences[0].value = boolean_true_node;

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &edge_info->cond_equivalences[1].cond);
  edge_info->cond_equivalences[1].value = boolean_false_node;
}

/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  set_ssa_name_value (x, y);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}

/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

static int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}

/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}

/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}

/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

static bool
simple_iv_increment_p (gimple stmt)
{
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  if (gimple_assign_rhs_code (stmt) != PLUS_EXPR
      && gimple_assign_rhs_code (stmt) != MINUS_EXPR)
    return false;

  preinc = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}

/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */

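/* A sketch of the effect (hypothetical names): with 7 recorded as the
   value of a_1, a successor PHI such as

     x_2 = PHI <a_1 (3), b_4 (5)>

   has the alternative associated with this edge replaced, yielding

     x_2 = PHI <7 (3), b_4 (5)>  */
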
static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }
    }
}

/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert_loc (loc, TREE_TYPE (index),
                                                 CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (code == NE_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }
          else if (TREE_CODE (op0) == SSA_NAME
                   && (is_gimple_min_invariant (op1)
                       || TREE_CODE (op1) == SSA_NAME))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (TREE_CODE (cond) == NE_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }

          /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
        }
    }
}

static void
dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                     basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}

/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = (struct edge_info *) true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          dom_thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
        }

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
          edge_info = (struct edge_info *) false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          /* Now thread the edge.  */
          dom_thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();
}

/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.  */

static void
eliminate_redundant_computations (gimple_stmt_iterator* gsi)
{
  tree expr_type;
  tree cached_lhs;
  bool insert = true;
  bool assigns_var_p = false;

  gimple stmt = gsi_stmt (*gsi);

  tree def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but can not
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || gimple_vdef (stmt)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    expr_type = TREE_TYPE (gimple_switch_index (stmt));
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
    {
#if defined ENABLE_CHECKING
      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
                  || is_gimple_min_invariant (cached_lhs));
#endif

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, " Replaced redundant expr '");
          print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

      if (assigns_var_p
          && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
    }
}

/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.  */
/* We handle only very simple copy equivalences here.  The heavy
   lifting is done by eliminate_redundant_computations.  */

static void
record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
{
  tree lhs;
  enum tree_code lhs_code;

  gcc_assert (is_gimple_assign (stmt));

  lhs = gimple_assign_lhs (stmt);
  lhs_code = TREE_CODE (lhs);

  if (lhs_code == SSA_NAME
      && gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      /* If the RHS of the assignment is a constant or another variable that
         may be propagated, register it in the CONST_AND_COPIES table.  We
         do not need to record unwind data for this, since this is a true
         assignment and not an equivalence inferred from a comparison.  All
         uses of this ssa name are dominated by this assignment, so unwinding
         just costs time and space.  */
      if (may_optimize_p
          && (TREE_CODE (rhs) == SSA_NAME
              || is_gimple_min_invariant (rhs)))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "==== ASGN ");
              print_generic_expr (dump_file, lhs, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, rhs, 0);
              fprintf (dump_file, "\n");
            }

          set_ssa_name_value (lhs, rhs);
        }
    }

  /* A memory store, even an aliased store, creates a useful
     equivalence.  By exchanging the LHS and RHS, creating suitable
     vops and recording the result in the available expression table,
     we may be able to expose more redundant loads.  */
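  /* A sketch with hypothetical names: for the store *p_1 = x_2, the
     exchanged statement x_2 = *p_1 is entered into the table, so a
     subsequent load y_3 = *p_1 (with no intervening store) can be
     rewritten as y_3 = x_2.  */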
  if (!gimple_has_volatile_ops (stmt)
      && gimple_references_memory_p (stmt)
      && gimple_assign_single_p (stmt)
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
      && !is_gimple_reg (lhs))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      gimple new_stmt;

      /* Build a new statement with the RHS and LHS exchanged.  */
      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* NOTE tuples.  The call to gimple_build_assign below replaced
             a call to build_gimple_modify_stmt, which did not set the
             SSA_NAME_DEF_STMT on the LHS of the assignment.  Doing so
             may cause an SSA validation failure, as the LHS may be a
             default-initialized name and should have no definition.  I'm
             a bit dubious of this, as the artificial statement that we
             generate here may in fact be ill-formed, but it is simply
             used as an internal device in this pass, and never becomes
             part of the CFG.  */
          gimple defstmt = SSA_NAME_DEF_STMT (rhs);
          new_stmt = gimple_build_assign (rhs, lhs);
          SSA_NAME_DEF_STMT (rhs) = defstmt;
        }
      else
        new_stmt = gimple_build_assign (rhs, lhs);

      gimple_set_vuse (new_stmt, gimple_vdef (stmt));

      /* Finally enter the statement into the available expression
         table.  */
      lookup_avail_expr (new_stmt, true);
    }
}

1949 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
1950 CONST_AND_COPIES. */
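/* For instance (hypothetical names), if CONST_AND_COPIES records
   x_1 == 5, then the use of x_1 in
     y_2 = x_1 + 1;
   is rewritten to y_2 = 5 + 1, and the statement is marked modified
   so that optimize_stmt will fold and rescan it.  */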
1952 static void
1953 cprop_operand (gimple stmt, use_operand_p op_p)
1955 tree val;
1956 tree op = USE_FROM_PTR (op_p);
1958 /* If the operand has a known constant value or it is known to be a
1959 copy of some other variable, use the value or copy stored in
1960 CONST_AND_COPIES. */
1961 val = SSA_NAME_VALUE (op);
1962 if (val && val != op)
1964 /* Do not change the base variable in the virtual operand
1965 tables. That would make it impossible to reconstruct
1966 the renamed virtual operand if we later modify this
1967 statement.  Also, only allow the new value to be an SSA_NAME
1968 for propagation into virtual operands. */
1969 if (!is_gimple_reg (op)
1970 && (TREE_CODE (val) != SSA_NAME
1971 || is_gimple_reg (val)
1972 || get_virtual_var (val) != get_virtual_var (op)))
1973 return;
1975 /* Do not replace hard register operands in asm statements. */
1976 if (gimple_code (stmt) == GIMPLE_ASM
1977 && !may_propagate_copy_into_asm (op))
1978 return;
1980 /* Certain operands are not allowed to be copy propagated due
1981 to their interaction with exception handling and some GCC
1982 extensions. */
1983 if (!may_propagate_copy (op, val))
1984 return;
1986 /* Do not propagate addresses that point to volatiles into memory
1987 stmts without volatile operands. */
1988 if (POINTER_TYPE_P (TREE_TYPE (val))
1989 && TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (val)))
1990 && gimple_has_mem_ops (stmt)
1991 && !gimple_has_volatile_ops (stmt))
1992 return;
1994 /* Do not propagate copies if the propagated value is at a deeper loop
1995 depth than the propagatee. Otherwise, this may move loop variant
1996 variables outside of their loops and prevent coalescing
1997 opportunities. If the value was loop invariant, it will be hoisted
1998 by LICM and exposed for copy propagation. */
1999 if (loop_depth_of_name (val) > loop_depth_of_name (op))
2000 return;
2002 /* Do not propagate copies into simple IV increment statements.
2003 See PR23821 for how this can disturb IV analysis. */
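/* E.g. (hypothetical names), if i_7 is defined by a PHI and is known
   to be a copy of j_3, do not rewrite the increment
     i_8 = i_7 + 1;
   into i_8 = j_3 + 1; that would hide the induction variable from
   later IV analysis.  */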
2004 if (TREE_CODE (val) != INTEGER_CST
2005 && simple_iv_increment_p (stmt))
2006 return;
2008 /* Dump details. */
2009 if (dump_file && (dump_flags & TDF_DETAILS))
2011 fprintf (dump_file, " Replaced '");
2012 print_generic_expr (dump_file, op, dump_flags);
2013 fprintf (dump_file, "' with %s '",
2014 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2015 print_generic_expr (dump_file, val, dump_flags);
2016 fprintf (dump_file, "'\n");
2019 if (TREE_CODE (val) != SSA_NAME)
2020 opt_stats.num_const_prop++;
2021 else
2022 opt_stats.num_copy_prop++;
2024 propagate_value (op_p, val);
2026 /* And note that we modified this statement. This is now
2027 safe, even if we changed virtual operands, since we will
2028 rescan the statement and rewrite its operands again. */
2029 gimple_set_modified (stmt, true);
2033 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2034 known value for that SSA_NAME (or NULL if no value is known).
2036 Propagate values from CONST_AND_COPIES into the uses, vuses and
2037 vdef_ops of STMT. */
2039 static void
2040 cprop_into_stmt (gimple stmt)
2042 use_operand_p op_p;
2043 ssa_op_iter iter;
2045 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2047 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2048 cprop_operand (stmt, op_p);
2052 /* Optimize the statement pointed to by iterator SI.
2054 We try to perform some simplistic global redundancy elimination and
2055 constant propagation:
2057 1- To detect global redundancy, we keep track of expressions that have
2058 been computed in this block and its dominators. If we find that the
2059 same expression is computed more than once, we eliminate repeated
2060 computations by using the target of the first one.
2062 2- Constant values and copy assignments. This is used to do very
2063 simplistic constant and copy propagation. When a constant or copy
2064 assignment is found, we map the value on the RHS of the assignment to
2065 the variable in the LHS in the CONST_AND_COPIES table. */
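/* For instance (hypothetical names), in
     a_1 = b_2 + c_3;
     ...
     d_4 = b_2 + c_3;
   the second computation is redundant and becomes d_4 = a_1, while
   an assignment x_5 = 42 lets dominated uses of x_5 be replaced
   by 42.  */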
2067 static void
2068 optimize_stmt (basic_block bb, gimple_stmt_iterator si)
2070 gimple stmt, old_stmt;
2071 bool may_optimize_p;
2072 bool modified_p = false;
2074 old_stmt = stmt = gsi_stmt (si);
2076 if (gimple_code (stmt) == GIMPLE_COND)
2077 canonicalize_comparison (stmt);
2079 update_stmt_if_modified (stmt);
2080 opt_stats.num_stmts++;
2082 if (dump_file && (dump_flags & TDF_DETAILS))
2084 fprintf (dump_file, "Optimizing statement ");
2085 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2088 /* Const/copy propagate into USES, VUSES and the RHS of VDEFs. */
2089 cprop_into_stmt (stmt);
2091 /* If the statement has been modified with constant replacements,
2092 fold its RHS before checking for redundant computations. */
2093 if (gimple_modified_p (stmt))
2095 tree rhs = NULL;
2097 /* Try to fold the statement making sure that STMT is kept
2098 up to date. */
2099 if (fold_stmt (&si))
2101 stmt = gsi_stmt (si);
2102 gimple_set_modified (stmt, true);
2104 if (dump_file && (dump_flags & TDF_DETAILS))
2106 fprintf (dump_file, " Folded to: ");
2107 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2111 /* We only need to consider cases that can yield a gimple operand. */
2112 if (gimple_assign_single_p (stmt))
2113 rhs = gimple_assign_rhs1 (stmt);
2114 else if (gimple_code (stmt) == GIMPLE_GOTO)
2115 rhs = gimple_goto_dest (stmt);
2116 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2117 /* This should never be an ADDR_EXPR. */
2118 rhs = gimple_switch_index (stmt);
2120 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2121 recompute_tree_invariant_for_addr_expr (rhs);
2123 /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
2124 even if fold_stmt updated the stmt already and thus cleared
2125 gimple_modified_p flag on it. */
2126 modified_p = true;
2129 /* Check for redundant computations.  Do this only for assignments and
2130 calls without volatile ops, and for conditionals and switches.  */
2131 may_optimize_p = (!gimple_has_volatile_ops (stmt)
2132 && ((is_gimple_assign (stmt)
2133 && !gimple_rhs_has_side_effects (stmt))
2134 || (is_gimple_call (stmt)
2135 && gimple_call_lhs (stmt) != NULL_TREE
2136 && !gimple_rhs_has_side_effects (stmt))
2137 || gimple_code (stmt) == GIMPLE_COND
2138 || gimple_code (stmt) == GIMPLE_SWITCH));
2140 if (may_optimize_p)
2142 if (gimple_code (stmt) == GIMPLE_CALL)
2144 /* Resolve __builtin_constant_p. If it hasn't been
2145 folded to integer_one_node by now, it's fairly
2146 certain that the value simply isn't constant. */
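/* E.g. a remaining t_1 = __builtin_constant_p (x_2) is replaced
   below by t_1 = 0 (names hypothetical).  */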
2147 tree callee = gimple_call_fndecl (stmt);
2148 if (callee
2149 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2150 && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
2152 propagate_tree_value_into_stmt (&si, integer_zero_node);
2153 stmt = gsi_stmt (si);
2157 update_stmt_if_modified (stmt);
2158 eliminate_redundant_computations (&si);
2159 stmt = gsi_stmt (si);
2162 /* Record any additional equivalences created by this statement. */
2163 if (is_gimple_assign (stmt))
2164 record_equivalences_from_stmt (stmt, may_optimize_p);
2166 /* If STMT is a COND_EXPR and it was modified, then we may know
2167 where it goes. If that is the case, then mark the CFG as altered.
2169 This will cause us to later call remove_unreachable_blocks and
2170 cleanup_tree_cfg when it is safe to do so. It is not safe to
2171 clean things up here since removal of edges and such can trigger
2172 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2173 the manager.
2175 That's all fine and good, except that once SSA_NAMEs are released
2176 to the manager, we must not call create_ssa_name until all references
2177 to released SSA_NAMEs have been eliminated.
2179 All references to the deleted SSA_NAMEs cannot be eliminated until
2180 we remove unreachable blocks.
2182 We cannot remove unreachable blocks until after we have completed
2183 any queued jump threading.
2185 We cannot complete any queued jump threads until we have taken
2186 appropriate variables out of SSA form.  Taking variables out of
2187 SSA form can call create_ssa_name and thus we lose.
2189 Ultimately I suspect we're going to need to change the interface
2190 into the SSA_NAME manager. */
2191 if (gimple_modified_p (stmt) || modified_p)
2193 tree val = NULL;
2195 update_stmt_if_modified (stmt);
2197 if (gimple_code (stmt) == GIMPLE_COND)
2198 val = fold_binary_loc (gimple_location (stmt),
2199 gimple_cond_code (stmt), boolean_type_node,
2200 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
2201 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2202 val = gimple_switch_index (stmt);
2204 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2205 cfg_altered = true;
2207 /* If we simplified a statement in such a way as to show that it
2208 cannot trap, update the eh information and the cfg to match.  */
2209 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
2211 bitmap_set_bit (need_eh_cleanup, bb->index);
2212 if (dump_file && (dump_flags & TDF_DETAILS))
2213 fprintf (dump_file, " Flagged to clear EH edges.\n");
2218 /* Search for an existing instance of STMT in the AVAIL_EXPRS table.
2219 If found, return its LHS. Otherwise insert STMT in the table and
2220 return NULL_TREE.
2222 Also, when an expression is first inserted in the table, it is
2223 added to AVAIL_EXPRS_STACK, so that it can be removed when we
2224 finish processing this block and its children.  */
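/* For instance (hypothetical names), looking up d_4 = b_2 + c_3 when
   the table already holds a_1 = b_2 + c_3 returns a_1; with INSERT,
   a statement seen for the first time is entered into the table and
   NULL_TREE is returned.  */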
2226 static tree
2227 lookup_avail_expr (gimple stmt, bool insert)
2229 void **slot;
2230 tree lhs;
2231 tree temp;
2232 struct expr_hash_elt element;
2234 /* Get LHS of assignment or call, else NULL_TREE. */
2235 lhs = gimple_get_lhs (stmt);
2237 initialize_hash_element (stmt, lhs, &element);
2239 if (dump_file && (dump_flags & TDF_DETAILS))
2241 fprintf (dump_file, "LKUP ");
2242 print_expr_hash_elt (dump_file, &element);
2245 /* Don't bother remembering constant assignments and copy operations.
2246 Constants and copy operations are handled by the constant/copy propagator
2247 in optimize_stmt. */
2248 if (element.expr.kind == EXPR_SINGLE
2249 && (TREE_CODE (element.expr.ops.single.rhs) == SSA_NAME
2250 || is_gimple_min_invariant (element.expr.ops.single.rhs)))
2251 return NULL_TREE;
2253 /* Finally try to find the expression in the main expression hash table. */
2254 slot = htab_find_slot_with_hash (avail_exprs, &element, element.hash,
2255 (insert ? INSERT : NO_INSERT));
2256 if (slot == NULL)
2257 return NULL_TREE;
2259 if (*slot == NULL)
2261 struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
2262 *element2 = element;
2263 element2->stamp = element2;
2264 *slot = (void *) element2;
2266 if (dump_file && (dump_flags & TDF_DETAILS))
2268 fprintf (dump_file, "2>>> ");
2269 print_expr_hash_elt (dump_file, element2);
2272 VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element2);
2273 return NULL_TREE;
2276 /* Extract the LHS of the assignment so that it can be used as the current
2277 definition of another variable. */
2278 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2280 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2281 use the value from the const_and_copies table. */
2282 if (TREE_CODE (lhs) == SSA_NAME)
2284 temp = SSA_NAME_VALUE (lhs);
2285 if (temp)
2286 lhs = temp;
2289 if (dump_file && (dump_flags & TDF_DETAILS))
2291 fprintf (dump_file, "FIND: ");
2292 print_generic_expr (dump_file, lhs, 0);
2293 fprintf (dump_file, "\n");
2296 return lhs;
2299 /* Hashing and equality functions for AVAIL_EXPRS. We compute a value number
2300 for expressions using the code of the expression and the SSA numbers of
2301 its operands. */
2303 static hashval_t
2304 avail_expr_hash (const void *p)
2306 gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
2307 const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
2308 tree vuse;
2309 hashval_t val = 0;
2311 val = iterative_hash_hashable_expr (expr, val);
2313 /* If the hash table entry is not associated with a statement, then we
2314 can just hash the expression and not worry about virtual operands
2315 and such. */
2316 if (!stmt)
2317 return val;
2319 /* Add the SSA version number of the vuse operand.  This is important
2320 because compound variables like arrays are not renamed in the
2321 operands. Rather, the rename is done on the virtual variable
2322 representing all the elements of the array. */
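/* E.g. two loads of a[i_1] separated by an aliasing store carry
   different VUSEs, so they hash differently and are correctly not
   treated as redundant (names hypothetical).  */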
2323 if ((vuse = gimple_vuse (stmt)))
2324 val = iterative_hash_expr (vuse, val);
2326 return val;
2329 static hashval_t
2330 real_avail_expr_hash (const void *p)
2332 return ((const struct expr_hash_elt *)p)->hash;
2335 static int
2336 avail_expr_eq (const void *p1, const void *p2)
2338 gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
2339 const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
2340 const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
2341 gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
2342 const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
2343 const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;
2345 /* This case should apply only when removing entries from the table. */
2346 if (stamp1 == stamp2)
2347 return true;
2349 /* FIXME tuples:
2350 We add stmts to a hash table and then modify them.  To detect the case
2351 that we modify a stmt and then search for it, we assume that the hash
2352 is always modified by that change.
2353 We have to fully check why this doesn't happen on trunk or rewrite
2354 this in a more reliable (and easier to understand) way. */
2355 if (((const struct expr_hash_elt *)p1)->hash
2356 != ((const struct expr_hash_elt *)p2)->hash)
2357 return false;
2359 /* In case of a collision, both RHSs have to be identical and have the
2360 same VUSE operands. */
2361 if (hashable_expr_equal_p (expr1, expr2)
2362 && types_compatible_p (expr1->type, expr2->type))
2364 /* Note that STMT1 and/or STMT2 may be NULL. */
2365 return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
2366 == (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
2369 return false;
2372 /* PHI-ONLY copy and constant propagation. This pass is meant to clean
2373 up degenerate PHIs created by or exposed by jump threading. */
2375 /* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
2376 NULL. */
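/* E.g. x_4 = PHI <x_2, x_2, x_4> is a degenerate with value x_2;
   arguments equal to the result itself are ignored
   (names hypothetical).  */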
2378 tree
2379 degenerate_phi_result (gimple phi)
2381 tree lhs = gimple_phi_result (phi);
2382 tree val = NULL;
2383 size_t i;
2385 /* Ignoring arguments which are the same as LHS, if all the remaining
2386 arguments are the same, then the PHI is a degenerate and has the
2387 value of that common argument. */
2388 for (i = 0; i < gimple_phi_num_args (phi); i++)
2390 tree arg = gimple_phi_arg_def (phi, i);
2392 if (arg == lhs)
2393 continue;
2394 else if (!arg)
2395 break;
2396 else if (!val)
2397 val = arg;
2398 else if (arg == val)
2399 continue;
2400 /* We duplicate part of operand_equal_p here, not only to speed
2401 things up, but also to avoid crashing when dereferencing the type
2402 of a released SSA name.  */
2403 else if (TREE_CODE (val) != TREE_CODE (arg)
2404 || TREE_CODE (val) == SSA_NAME
2405 || !operand_equal_p (arg, val, 0))
2406 break;
2408 return (i == gimple_phi_num_args (phi) ? val : NULL);
2411 /* Given a statement STMT, which is either a PHI node or an assignment,
2412 remove it from the IL. */
2414 static void
2415 remove_stmt_or_phi (gimple stmt)
2417 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2419 if (gimple_code (stmt) == GIMPLE_PHI)
2420 remove_phi_node (&gsi, true);
2421 else
2423 gsi_remove (&gsi, true);
2424 release_defs (stmt);
2428 /* Given a statement STMT, which is either a PHI node or an assignment,
2429 return the "rhs" of the node.  In the case of a non-degenerate
2430 PHI, NULL is returned.  */
2432 static tree
2433 get_rhs_or_phi_arg (gimple stmt)
2435 if (gimple_code (stmt) == GIMPLE_PHI)
2436 return degenerate_phi_result (stmt);
2437 else if (gimple_assign_single_p (stmt))
2438 return gimple_assign_rhs1 (stmt);
2439 else
2440 gcc_unreachable ();
2444 /* Given a statement STMT, which is either a PHI node or an assignment,
2445 return the "lhs" of the node. */
2447 static tree
2448 get_lhs_or_phi_result (gimple stmt)
2450 if (gimple_code (stmt) == GIMPLE_PHI)
2451 return gimple_phi_result (stmt);
2452 else if (is_gimple_assign (stmt))
2453 return gimple_assign_lhs (stmt);
2454 else
2455 gcc_unreachable ();
2458 /* Propagate RHS into all uses of LHS (when possible).
2460 RHS and LHS are derived from STMT, which is passed in solely so
2461 that we can remove it if propagation is successful.
2463 When propagating into a PHI node or into a statement which turns
2464 into a trivial copy or constant initialization, set the
2465 appropriate bit in INTERESTING_NAMEs so that we will visit those
2466 nodes as well in an effort to pick up secondary optimization
2467 opportunities. */
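/* For instance (hypothetical names), given x_1 = 0, every use of x_1
   is replaced by 0; if one of those uses is y_2 = PHI <x_1, z_3>,
   the PHI result y_2 is marked in INTERESTING_NAMES since the PHI
   may now be, or may later become, a degenerate.  */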
2469 static void
2470 propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
2472 /* First verify that propagation is valid and isn't going to move a
2473 loop variant variable outside its loop. */
2474 if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
2475 && (TREE_CODE (rhs) != SSA_NAME
2476 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2477 && may_propagate_copy (lhs, rhs)
2478 && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
2480 use_operand_p use_p;
2481 imm_use_iterator iter;
2482 gimple use_stmt;
2483 bool all = true;
2485 /* Dump details. */
2486 if (dump_file && (dump_flags & TDF_DETAILS))
2488 fprintf (dump_file, " Replacing '");
2489 print_generic_expr (dump_file, lhs, dump_flags);
2490 fprintf (dump_file, "' with %s '",
2491 (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
2492 print_generic_expr (dump_file, rhs, dump_flags);
2493 fprintf (dump_file, "'\n");
2496 /* Walk over every use of LHS and try to replace the use with RHS.
2497 At this point the only reason why such a propagation would not
2498 be successful would be if the use occurs in an ASM_EXPR. */
2499 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2501 /* Leave debug stmts alone. If we succeed in propagating
2502 all non-debug uses, we'll drop the DEF, and propagation
2503 into debug stmts will occur then. */
2504 if (gimple_debug_bind_p (use_stmt))
2505 continue;
2507 /* It's not always safe to propagate into an ASM_EXPR. */
2508 if (gimple_code (use_stmt) == GIMPLE_ASM
2509 && ! may_propagate_copy_into_asm (lhs))
2511 all = false;
2512 continue;
2515 /* Dump details. */
2516 if (dump_file && (dump_flags & TDF_DETAILS))
2518 fprintf (dump_file, " Original statement:");
2519 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2522 /* Propagate the RHS into this use of the LHS. */
2523 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2524 propagate_value (use_p, rhs);
2526 /* Special cases to avoid useless calls into the folding
2527 routines, operand scanning, etc.
2529 First, propagation into a PHI may cause the PHI to become
2530 a degenerate, so mark the PHI as interesting. No other
2531 actions are necessary.
2533 Second, if we're propagating a virtual operand and the
2534 propagation does not change the underlying _DECL node for
2535 the virtual operand, then no further actions are necessary. */
2536 if (gimple_code (use_stmt) == GIMPLE_PHI
2537 || (! is_gimple_reg (lhs)
2538 && TREE_CODE (rhs) == SSA_NAME
2539 && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
2541 /* Dump details. */
2542 if (dump_file && (dump_flags & TDF_DETAILS))
2544 fprintf (dump_file, " Updated statement:");
2545 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2548 /* Propagation into a PHI may expose new degenerate PHIs,
2549 so mark the result of the PHI as interesting. */
2550 if (gimple_code (use_stmt) == GIMPLE_PHI)
2552 tree result = get_lhs_or_phi_result (use_stmt);
2553 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2556 continue;
2559 /* From this point onward we are propagating into a
2560 real statement. Folding may (or may not) be possible,
2561 we may expose new operands, expose dead EH edges,
2562 etc. */
2563 /* NOTE tuples. In the tuples world, fold_stmt_inplace
2564 cannot fold a call that simplifies to a constant,
2565 because the GIMPLE_CALL must be replaced by a
2566 GIMPLE_ASSIGN, and there is no way to effect such a
2567 transformation in-place. We might want to consider
2568 using the more general fold_stmt here. */
2569 fold_stmt_inplace (use_stmt);
2571 /* Sometimes propagation can expose new operands to the
2572 renamer. */
2573 update_stmt (use_stmt);
2575 /* Dump details. */
2576 if (dump_file && (dump_flags & TDF_DETAILS))
2578 fprintf (dump_file, " Updated statement:");
2579 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2582 /* If we replaced a variable index with a constant, then
2583 we would need to update the invariant flag for ADDR_EXPRs. */
2584 if (gimple_assign_single_p (use_stmt)
2585 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
2586 recompute_tree_invariant_for_addr_expr
2587 (gimple_assign_rhs1 (use_stmt));
2589 /* If we cleaned up EH information from the statement,
2590 mark its containing block as needing EH cleanups. */
2591 if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
2593 bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
2594 if (dump_file && (dump_flags & TDF_DETAILS))
2595 fprintf (dump_file, " Flagged to clear EH edges.\n");
2598 /* Propagation may expose new trivial copy/constant propagation
2599 opportunities. */
2600 if (gimple_assign_single_p (use_stmt)
2601 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
2602 && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
2603 || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
2605 tree result = get_lhs_or_phi_result (use_stmt);
2606 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2609 /* Propagation into these nodes may make certain edges in
2610 the CFG unexecutable.  We want to identify them, as PHI nodes
2611 at the destination of those unexecutable edges may become
2612 degenerates. */
2613 else if (gimple_code (use_stmt) == GIMPLE_COND
2614 || gimple_code (use_stmt) == GIMPLE_SWITCH
2615 || gimple_code (use_stmt) == GIMPLE_GOTO)
2617 tree val;
2619 if (gimple_code (use_stmt) == GIMPLE_COND)
2620 val = fold_binary_loc (gimple_location (use_stmt),
2621 gimple_cond_code (use_stmt),
2622 boolean_type_node,
2623 gimple_cond_lhs (use_stmt),
2624 gimple_cond_rhs (use_stmt));
2625 else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
2626 val = gimple_switch_index (use_stmt);
2627 else
2628 val = gimple_goto_dest (use_stmt);
2630 if (val && is_gimple_min_invariant (val))
2632 basic_block bb = gimple_bb (use_stmt);
2633 edge te = find_taken_edge (bb, val);
2634 edge_iterator ei;
2635 edge e;
2636 gimple_stmt_iterator gsi, psi;
2638 /* Remove all outgoing edges except TE. */
2639 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
2641 if (e != te)
2643 /* Mark all the PHI nodes at the destination of
2644 the unexecutable edge as interesting. */
2645 for (psi = gsi_start_phis (e->dest);
2646 !gsi_end_p (psi);
2647 gsi_next (&psi))
2649 gimple phi = gsi_stmt (psi);
2651 tree result = gimple_phi_result (phi);
2652 int version = SSA_NAME_VERSION (result);
2654 bitmap_set_bit (interesting_names, version);
2657 te->probability += e->probability;
2659 te->count += e->count;
2660 remove_edge (e);
2661 cfg_altered = true;
2663 else
2664 ei_next (&ei);
2667 gsi = gsi_last_bb (gimple_bb (use_stmt));
2668 gsi_remove (&gsi, true);
2670 /* And fixup the flags on the single remaining edge. */
2671 te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
2672 te->flags &= ~EDGE_ABNORMAL;
2673 te->flags |= EDGE_FALLTHRU;
2674 if (te->probability > REG_BR_PROB_BASE)
2675 te->probability = REG_BR_PROB_BASE;
2680 /* Ensure there is nothing else to do. */
2681 gcc_assert (!all || has_zero_uses (lhs));
2683 /* If we were able to propagate away all uses of LHS, then
2684 we can remove STMT. */
2685 if (all)
2686 remove_stmt_or_phi (stmt);
2690 /* STMT is either a PHI node (potentially a degenerate PHI node) or
2691 a statement that is a trivial copy or constant initialization.
2693 Attempt to eliminate STMT by propagating its RHS into all uses of
2694 its LHS. This may in turn set new bits in INTERESTING_NAMES
2695 for nodes we want to revisit later.
2697 All exit paths should clear INTERESTING_NAMES for the result
2698 of STMT. */
2700 static void
2701 eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
2703 tree lhs = get_lhs_or_phi_result (stmt);
2704 tree rhs;
2705 int version = SSA_NAME_VERSION (lhs);
2707 /* If the LHS of this statement or PHI has no uses, then we can
2708 just eliminate it. This can occur if, for example, the PHI
2709 was created by block duplication due to threading and its only
2710 use was in the conditional at the end of the block which was
2711 deleted. */
2712 if (has_zero_uses (lhs))
2714 bitmap_clear_bit (interesting_names, version);
2715 remove_stmt_or_phi (stmt);
2716 return;
2719 /* Get the RHS of the assignment or PHI node if the PHI is a
2720 degenerate. */
2721 rhs = get_rhs_or_phi_arg (stmt);
2722 if (!rhs)
2724 bitmap_clear_bit (interesting_names, version);
2725 return;
2728 propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
2730 /* Note that STMT may well have been deleted by now, so do
2731 not access it; instead use the saved version number to clear
2732 its entry in the worklist.  */
2733 bitmap_clear_bit (interesting_names, version);
2736 /* The first phase in degenerate PHI elimination.
2738 Eliminate the degenerate PHIs in BB, then recurse on the
2739 dominator children of BB. */
2741 static void
2742 eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
2744 gimple_stmt_iterator gsi;
2745 basic_block son;
2747 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2749 gimple phi = gsi_stmt (gsi);
2751 eliminate_const_or_copy (phi, interesting_names);
2754 /* Recurse into the dominator children of BB. */
2755 for (son = first_dom_son (CDI_DOMINATORS, bb);
2756 son;
2757 son = next_dom_son (CDI_DOMINATORS, son))
2758 eliminate_degenerate_phis_1 (son, interesting_names);
2762 /* A very simple pass to eliminate degenerate PHI nodes from the
2763 IL.  This is meant to be fast enough to be run several times in
2764 the optimization pipeline.
2766 Certain optimizations, particularly those which duplicate blocks
2767 or remove edges from the CFG can create or expose PHIs which are
2768 trivial copies or constant initializations.
2770 While we could pick up these optimizations in DOM or with the
2771 combination of copy-prop and CCP, those solutions are far too
2772 heavy-weight for our needs.
2774 This implementation has two phases so that we can efficiently
2775 eliminate the first order degenerate PHIs and second order
2776 degenerate PHIs.
2778 The first phase performs a dominator walk to identify and eliminate
2779 the vast majority of the degenerate PHIs. When a degenerate PHI
2780 is identified and eliminated, any affected statements or PHIs
2781 are put on a worklist.
2783 The second phase eliminates degenerate PHIs and trivial copies
2784 or constant initializations using the worklist. This is how we
2785 pick up the secondary optimization opportunities with minimal
2786 cost. */
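/* For instance (hypothetical names), eliminating x_2 = PHI <x_1, x_1>
   by replacing x_2 with x_1 may turn y_3 = PHI <x_2, x_1> into the
   degenerate y_3 = PHI <x_1, x_1>; the worklist phase picks up this
   second order opportunity.  */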
2788 static unsigned int
2789 eliminate_degenerate_phis (void)
2791 bitmap interesting_names;
2792 bitmap interesting_names1;
2794 /* Bitmap of blocks which need EH information updated.  We cannot
2795 update it on-the-fly as doing so invalidates the dominator tree. */
2796 need_eh_cleanup = BITMAP_ALLOC (NULL);
2798 /* INTERESTING_NAMES is effectively our worklist, indexed by
2799 SSA_NAME_VERSION.
2801 A set bit indicates that the statement or PHI node which
2802 defines the SSA_NAME should be (re)examined to determine if
2803 it has become a degenerate PHI or trivial const/copy propagation
2804 opportunity.
2806 Experiments have shown we generally get better compilation
2807 time behavior with bitmaps rather than sbitmaps. */
2808 interesting_names = BITMAP_ALLOC (NULL);
2809 interesting_names1 = BITMAP_ALLOC (NULL);
2811 calculate_dominance_info (CDI_DOMINATORS);
2812 cfg_altered = false;
2814 /* First phase. Eliminate degenerate PHIs via a dominator
2815 walk of the CFG.
2817 Experiments have indicated that we generally get better
2818 compile-time behavior by visiting blocks in the first
2819 phase in dominator order. Presumably this is because walking
2820 in dominator order leaves fewer PHIs for later examination
2821 by the worklist phase. */
2822 eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
2824 /* Second phase. Eliminate second order degenerate PHIs as well
2825 as trivial copies or constant initializations identified by
2826 the first phase or this phase. Basically we keep iterating
2827 until our set of INTERESTING_NAMEs is empty. */
2828 while (!bitmap_empty_p (interesting_names))
2830 unsigned int i;
2831 bitmap_iterator bi;
2833 /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
2834 changed during the loop. Copy it to another bitmap and
2835 use that. */
2836 bitmap_copy (interesting_names1, interesting_names);
2838 EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
2840 tree name = ssa_name (i);
2842 /* Ignore SSA_NAMEs that have been released because
2843 their defining statement was deleted (unreachable). */
2844 if (name)
2845 eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
2846 interesting_names);
2850 if (cfg_altered)
2851 free_dominance_info (CDI_DOMINATORS);
2853 /* Propagation of const and copies may make some EH edges dead. Purge
2854 such edges from the CFG as needed. */
2855 if (!bitmap_empty_p (need_eh_cleanup))
2857 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
2858 BITMAP_FREE (need_eh_cleanup);
2861 BITMAP_FREE (interesting_names);
2862 BITMAP_FREE (interesting_names1);
2863 return 0;
2866 struct gimple_opt_pass pass_phi_only_cprop =
2869 GIMPLE_PASS,
2870 "phicprop", /* name */
2871 gate_dominator, /* gate */
2872 eliminate_degenerate_phis, /* execute */
2873 NULL, /* sub */
2874 NULL, /* next */
2875 0, /* static_pass_number */
2876 TV_TREE_PHI_CPROP, /* tv_id */
2877 PROP_cfg | PROP_ssa, /* properties_required */
2878 0, /* properties_provided */
2879 0, /* properties_destroyed */
2880 0, /* todo_flags_start */
2881 TODO_cleanup_cfg
2882 | TODO_dump_func
2883 | TODO_ggc_collect
2884 | TODO_verify_ssa
2885 | TODO_verify_stmts
2886 | TODO_update_ssa /* todo_flags_finish */