/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "real.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"
/* This file implements optimizations on the dominator tree.  */

/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */

enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_CALL
};

struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0; tree opnd1; } binary;
    struct { tree fn; bool pure; size_t nargs; tree *args; } call;
  } ops;
};
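
/* For example, an assignment such as

     x_1 = y_2 + z_3;

   would be recorded with KIND == EXPR_BINARY, OPS.BINARY.OP == PLUS_EXPR
   and the two SSA names y_2 and z_3 as OPS.BINARY.OPND0/OPND1.  */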
/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

struct cond_equivalence
{
  struct hashable_expr cond;
  tree value;
};

/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */
struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  The number of recorded conditions can vary, but
     can be determined by the condition's code.  So we have an array
     and its maximum index rather than use a varray.  */
  struct cond_equivalence *cond_equivalences;
  unsigned int max_cond_equivalences;
};

/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;
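
/* For instance, after processing

     a_1 = b_2 + c_3;

   a later occurrence of b_2 + c_3 on some RHS dominated by that
   statement can simply be replaced by a_1.  */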
/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;
DEF_VEC_P(expr_hash_elt_t);
DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);

static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;

/* Stack of statements we need to rescan during finalization for newly
   exposed variables.

   Statement rescanning must occur after the current block's available
   expressions are removed from AVAIL_EXPRS.  Else we may change the
   hash code for an expression and be unable to find/remove it from
   AVAIL_EXPRS.  */
static VEC(gimple,heap) *stmts_to_rescan;

/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};

/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;
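
/* Entries are pushed in (previous value, destination) order by
   record_const_or_copy_1 below, so restore_vars_to_original_value
   pops the destination first and then the value to restore for it.  */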
/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;

/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (struct cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static bool eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_leave_block (struct dom_walk_data *, basic_block);
static void dom_opt_enter_block (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      expr->type = NULL_TREE;

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn = gimple_call_fn (stmt);

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = (tree *) xcalloc (nargs, sizeof (tree));
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Compare two hashable_expr structures for equivalence.
   They are considered equivalent when the expressions
   they denote must necessarily be equal.  The logic is intended
   to follow that of operand_equal_p in fold-const.c  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (! operand_equal_p (expr0->ops.call.fn,
                               expr1->ops.call.fn, 0))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
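
/* Thus, for example, a_1 + b_2 and b_2 + a_1 compare equal because
   PLUS_EXPR is commutative, while a_1 - b_2 and b_2 - a_1 do not.
   Impure calls never compare equal, even to themselves, since their
   results may differ between invocations.  */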
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */

static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;

        val = iterative_hash_object (code, val);
        val = iterative_hash_expr (expr->ops.call.fn, val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}
/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;

        print_generic_expr (stream, element->expr.ops.call.fn, 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, " ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}

/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);

  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);

  free (element);
}
/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}

/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              if (edge_info->cond_equivalences)
                free (edge_info->cond_equivalences);
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}
/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq,
                             free_expr_hash_elt);
  avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  stmts_to_rescan = VEC_alloc (gimple, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dom_opt_enter_block;
  walk_data.after_dom_children = dom_opt_leave_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through the loop header to the loop body,
     assuming that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;

    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (single_succ_p (bb)
              && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            {
              bitmap_clear_bit (need_eh_cleanup, i);
              bitmap_set_bit (need_eh_cleanup, single_succ (bb)->index);
            }
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);
  VEC_free (gimple, heap, stmts_to_rescan);

  /* Free the value-handle array.  */
  threadedge_finalize_values ();
  ssa_name_values = NULL;

  return 0;
}

static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}
struct gimple_opt_pass pass_dominator =
{
 {
  GIMPLE_PASS,
  "dom",				/* name */
  gate_dominator,			/* gate */
  tree_ssa_dominator_optimize,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,		/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_cleanup_cfg
    | TODO_verify_ssa			/* todo_flags_finish */
 }
};
/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */

static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}
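
/* For instance, tree_swap_operands_p prefers constants as the second
   operand, so a condition such as

     if (5 < a_1)

   would be rewritten here as

     if (a_1 > 5)

   giving later hash table lookups a single canonical form to match.  */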
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Remove all the expressions in LOCALS from TABLE, stopping when there are
   LIMIT entries left in LOCALs.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
    {
      struct expr_hash_elt element;
      expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);

      if (victim == NULL)
        break;

      element = *victim;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, &element);
        }

      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
    }
}
/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = VEC_pop (tree, const_and_copies_stack);
      set_ssa_name_value (dest, prev_value);
    }
}
/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}

/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

static void
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  if (! walk_data->global_data)
    {
      gimple dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);
      walk_data->global_data = dummy_cond;
    }

  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
}
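
/* As a sketch of what threading buys us: given

     bb2: if (a_1 > 4) goto bb3; else goto bb4;
     ...
     bb5: if (a_1 > 4) goto bb6; else goto bb7;

   a path entering bb5 only via bb2's true arm already knows the second
   condition is true, so the incoming edge can be redirected straight
   to bb6, bypassing the redundant test.  */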
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */

static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}
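
/* For example, a degenerate PHI such as

     x_4 = PHI <y_2(3), y_2(5), x_4(6)>

   makes x_4 equivalent to y_2: the self-referencing argument is
   ignored and every remaining alternative is the same.  */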
/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}
/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;

          if (lhs)
            record_equality (lhs, rhs);

          if (cond_equivalences)
            for (i = 0; i < edge_info->max_cond_equivalences; i++)
              record_cond (&cond_equivalences[i]);
        }
    }
}
/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements:                   %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, "    avail_exprs: ");
  htab_statistics (file, avail_exprs);
}

/* Dump SSA statistics on stderr.  */

void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}

/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}
/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (struct cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
    }
  else
    free (element);
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           struct cond_equivalence *p)
{
  struct hashable_expr *cond = &p->cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  p->value = boolean_true_node;
}
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 6;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 6);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[5]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 3;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 3);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[2]);
        }
      else
        {
          edge_info->max_cond_equivalences = 2;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 5;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 5);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNORDERED_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 8);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[5]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[7]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNEQ_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case LTGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    default:
      edge_info->max_cond_equivalences = 2;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &edge_info->cond_equivalences[0].cond);
  edge_info->cond_equivalences[0].value = boolean_true_node;

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &edge_info->cond_equivalences[1].cond);
  edge_info->cond_equivalences[1].value = boolean_false_node;
}
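
/* As a concrete illustration: for an integer comparison a_1 < b_2 the
   array built above holds four entries,

     [0] a_1 <  b_2   is true
     [1] a_1 >= b_2   is false
     [2] a_1 <= b_2   is true
     [3] a_1 != b_2   is true

   so any of those dominated conditions can later be looked up directly
   in the expression hash table.  */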
/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  set_ssa_name_value (x, y);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}
/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

static int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}
/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

static bool
simple_iv_increment_p (gimple stmt)
{
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  if (gimple_assign_rhs_code (stmt) != PLUS_EXPR
      && gimple_assign_rhs_code (stmt) != MINUS_EXPR)
    return false;

  preinc = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */

static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }
    }
}
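
/* So if the table currently records a_1 = 7, a PHI argument a_1(5)
   in a successor block reached over the edge from block 5 would be
   rewritten to 7, possibly turning that PHI into a degenerate one
   that record_equivalences_from_phis can exploit later.  */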
/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert_loc (loc, TREE_TYPE (index),
                                                 CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (code == NE_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }
          else if (TREE_CODE (op0) == SSA_NAME
                   && (is_gimple_min_invariant (op1)
                       || TREE_CODE (op1) == SSA_NAME))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (TREE_CODE (cond) == NE_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }

          /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
        }
    }
}
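
/* For instance, after

     if (x_2 == 7) goto bb3; else goto bb4;

   the edge into bb3 carries the simple equivalence x_2 = 7, while
   both edges also record the condition itself (and the conditions it
   dominates) as true or false in their cond_equivalences arrays.  */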
static void
dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                     basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = (struct edge_info *) true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          dom_thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
          restore_vars_to_original_value ();
        }

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
          edge_info = (struct edge_info *) false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          /* Now thread the edge.  */
          dom_thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();

  /* If we queued any statements to rescan in this block, then
     go ahead and rescan them now.  */
  while (VEC_length (gimple, stmts_to_rescan) > 0)
    {
      gimple stmt = VEC_last (gimple, stmts_to_rescan);
      basic_block stmt_bb = gimple_bb (stmt);

      if (stmt_bb != bb)
        break;

      VEC_pop (gimple, stmts_to_rescan);
      update_stmt (stmt);
    }
}
/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.  */

static bool
eliminate_redundant_computations (gimple_stmt_iterator* gsi)
{
  tree expr_type;
  tree cached_lhs;
  bool insert = true;
  bool retval = false;
  bool assigns_var_p = false;

  gimple stmt = gsi_stmt (*gsi);

  tree def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but can not
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || gimple_vdef (stmt)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    expr_type = TREE_TYPE (gimple_switch_index (stmt));
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return false;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
    {
#if defined ENABLE_CHECKING
      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
                  || is_gimple_min_invariant (cached_lhs));
#endif

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced redundant expr '");
          print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

      if (TREE_CODE (cached_lhs) == ADDR_EXPR
          || (POINTER_TYPE_P (expr_type)
              && is_gimple_min_invariant (cached_lhs)))
        retval = true;

      if (assigns_var_p
          && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
    }

  return retval;
}
1894 /* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
1895 the available expressions table or the const_and_copies table.
1896 Detect and record those equivalences. */
1897 /* We handle only very simple copy equivalences here. The heavy
1898 lifing is done by eliminate_redundant_computations. */
1900 static void
1901 record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
1903 tree lhs;
1904 enum tree_code lhs_code;
1906 gcc_assert (is_gimple_assign (stmt));
1908 lhs = gimple_assign_lhs (stmt);
1909 lhs_code = TREE_CODE (lhs);
1911 if (lhs_code == SSA_NAME
1912 && gimple_assign_single_p (stmt))
1914 tree rhs = gimple_assign_rhs1 (stmt);
1916 /* If the RHS of the assignment is a constant or another variable that
1917 may be propagated, register it in the CONST_AND_COPIES table. We
1918 do not need to record unwind data for this, since this is a true
1919 assignment and not an equivalence inferred from a comparison. All
1920          uses of this SSA name are dominated by this assignment, so unwinding
1921 just costs time and space. */
1922 if (may_optimize_p
1923 && (TREE_CODE (rhs) == SSA_NAME
1924 || is_gimple_min_invariant (rhs)))
1926 if (dump_file && (dump_flags & TDF_DETAILS))
1928 fprintf (dump_file, "==== ASGN ");
1929 print_generic_expr (dump_file, lhs, 0);
1930 fprintf (dump_file, " = ");
1931 print_generic_expr (dump_file, rhs, 0);
1932 fprintf (dump_file, "\n");
1935 set_ssa_name_value (lhs, rhs);
1939 /* A memory store, even an aliased store, creates a useful
1940 equivalence. By exchanging the LHS and RHS, creating suitable
1941 vops and recording the result in the available expression table,
1942 we may be able to expose more redundant loads. */
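      /* For instance (hypothetical GIMPLE, invented names): after the store

           *p_1 = x_2;

         we enter the artificial statement  x_2 = *p_1  into the table,
         so that a later load  y_3 = *p_1  with the same virtual
         operands can be found there and replaced by  y_3 = x_2.  */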
1943 if (!gimple_has_volatile_ops (stmt)
1944 && gimple_references_memory_p (stmt)
1945 && gimple_assign_single_p (stmt)
1946 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1947 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
1948 && !is_gimple_reg (lhs))
1950 tree rhs = gimple_assign_rhs1 (stmt);
1951 gimple new_stmt;
1953 /* Build a new statement with the RHS and LHS exchanged. */
1954 if (TREE_CODE (rhs) == SSA_NAME)
1956 /* NOTE tuples. The call to gimple_build_assign below replaced
1957 a call to build_gimple_modify_stmt, which did not set the
1958 SSA_NAME_DEF_STMT on the LHS of the assignment. Doing so
1959 may cause an SSA validation failure, as the LHS may be a
1960 default-initialized name and should have no definition. I'm
1961 a bit dubious of this, as the artificial statement that we
1962 generate here may in fact be ill-formed, but it is simply
1963 used as an internal device in this pass, and never becomes
1964 part of the CFG. */
1965 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
1966 new_stmt = gimple_build_assign (rhs, lhs);
1967 SSA_NAME_DEF_STMT (rhs) = defstmt;
1969 else
1970 new_stmt = gimple_build_assign (rhs, lhs);
1972 gimple_set_vuse (new_stmt, gimple_vdef (stmt));
1974 /* Finally enter the statement into the available expression
1975 table. */
1976 lookup_avail_expr (new_stmt, true);
1980 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
1981 CONST_AND_COPIES. */
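/* For example (hypothetical names): if CONST_AND_COPIES records that
   j_5 currently has the value 10, then the use of j_5 in

     if (j_5 > 4)

   is replaced to yield  if (10 > 4),  which the folding performed
   later in optimize_stmt can resolve to a constant condition.  */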
1983 static bool
1984 cprop_operand (gimple stmt, use_operand_p op_p)
1986 bool may_have_exposed_new_symbols = false;
1987 tree val;
1988 tree op = USE_FROM_PTR (op_p);
1990 /* If the operand has a known constant value or it is known to be a
1991 copy of some other variable, use the value or copy stored in
1992 CONST_AND_COPIES. */
1993 val = SSA_NAME_VALUE (op);
1994 if (val && val != op)
1996 /* Do not change the base variable in the virtual operand
1997 tables. That would make it impossible to reconstruct
1998 the renamed virtual operand if we later modify this
1999 statement. Also only allow the new value to be an SSA_NAME
2000 for propagation into virtual operands. */
2001 if (!is_gimple_reg (op)
2002 && (TREE_CODE (val) != SSA_NAME
2003 || is_gimple_reg (val)
2004 || get_virtual_var (val) != get_virtual_var (op)))
2005 return false;
2007 /* Do not replace hard register operands in asm statements. */
2008 if (gimple_code (stmt) == GIMPLE_ASM
2009 && !may_propagate_copy_into_asm (op))
2010 return false;
2012 /* Certain operands are not allowed to be copy propagated due
2013 to their interaction with exception handling and some GCC
2014 extensions. */
2015 if (!may_propagate_copy (op, val))
2016 return false;
2018 /* Do not propagate addresses that point to volatiles into memory
2019 stmts without volatile operands. */
2020 if (POINTER_TYPE_P (TREE_TYPE (val))
2021 && TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (val)))
2022 && gimple_has_mem_ops (stmt)
2023 && !gimple_has_volatile_ops (stmt))
2024 return false;
2026 /* Do not propagate copies if the propagated value is at a deeper loop
2027 depth than the propagatee. Otherwise, this may move loop variant
2028 variables outside of their loops and prevent coalescing
2029 opportunities. If the value was loop invariant, it will be hoisted
2030 by LICM and exposed for copy propagation. */
2031 if (loop_depth_of_name (val) > loop_depth_of_name (op))
2032 return false;
2034 /* Dump details. */
2035 if (dump_file && (dump_flags & TDF_DETAILS))
2037 fprintf (dump_file, " Replaced '");
2038 print_generic_expr (dump_file, op, dump_flags);
2039 fprintf (dump_file, "' with %s '",
2040 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2041 print_generic_expr (dump_file, val, dump_flags);
2042 fprintf (dump_file, "'\n");
2045 /* If VAL is an ADDR_EXPR or a constant of pointer type, note
2046 that we may have exposed a new symbol for SSA renaming. */
2047 if (TREE_CODE (val) == ADDR_EXPR
2048 || (POINTER_TYPE_P (TREE_TYPE (op))
2049 && is_gimple_min_invariant (val)))
2050 may_have_exposed_new_symbols = true;
2052 if (TREE_CODE (val) != SSA_NAME)
2053 opt_stats.num_const_prop++;
2054 else
2055 opt_stats.num_copy_prop++;
2057 propagate_value (op_p, val);
2059 /* And note that we modified this statement. This is now
2060 safe, even if we changed virtual operands since we will
2061 rescan the statement and rewrite its operands again. */
2062 gimple_set_modified (stmt, true);
2064 return may_have_exposed_new_symbols;
2067 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2068 known value for that SSA_NAME (or NULL if no value is known).
2070 Propagate values from CONST_AND_COPIES into the uses, vuses and
2071 vdef_ops of STMT. */
2073 static bool
2074 cprop_into_stmt (gimple stmt)
2076 bool may_have_exposed_new_symbols = false;
2077 use_operand_p op_p;
2078 ssa_op_iter iter;
2080 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2082 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2083 may_have_exposed_new_symbols |= cprop_operand (stmt, op_p);
2086 return may_have_exposed_new_symbols;
2089 /* Optimize the statement pointed to by iterator SI.
2091 We try to perform some simplistic global redundancy elimination and
2092 constant propagation:
2094 1- To detect global redundancy, we keep track of expressions that have
2095 been computed in this block and its dominators. If we find that the
2096 same expression is computed more than once, we eliminate repeated
2097 computations by using the target of the first one.
2099 2- Constant values and copy assignments. This is used to do very
2100 simplistic constant and copy propagation. When a constant or copy
2101 assignment is found, we map the value on the RHS of the assignment to
2102 the variable in the LHS in the CONST_AND_COPIES table. */
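/* A sketch of the intended effect on hypothetical GIMPLE (names
   invented):

     a_1 = b_2 + c_3;
     ...
     d_4 = b_2 + c_3;   <-- redundant; rewritten to  d_4 = a_1;

   Similarly, after seeing  e_5 = 7,  later uses of e_5 may be
   replaced by the constant 7.  */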
2104 static void
2105 optimize_stmt (basic_block bb, gimple_stmt_iterator si)
2107 gimple stmt, old_stmt;
2108 bool may_optimize_p;
2109 bool may_have_exposed_new_symbols;
2110 bool modified_p = false;
2112 old_stmt = stmt = gsi_stmt (si);
2114 if (gimple_code (stmt) == GIMPLE_COND)
2115 canonicalize_comparison (stmt);
2117 update_stmt_if_modified (stmt);
2118 opt_stats.num_stmts++;
2120 if (dump_file && (dump_flags & TDF_DETAILS))
2122 fprintf (dump_file, "Optimizing statement ");
2123 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2126 /* Const/copy propagate into USES, VUSES and the RHS of VDEFs. */
2127 may_have_exposed_new_symbols = cprop_into_stmt (stmt);
2129 /* If the statement has been modified with constant replacements,
2130 fold its RHS before checking for redundant computations. */
2131 if (gimple_modified_p (stmt))
2133 tree rhs = NULL;
2135 /* Try to fold the statement making sure that STMT is kept
2136 up to date. */
2137 if (fold_stmt (&si))
2139 stmt = gsi_stmt (si);
2141 if (dump_file && (dump_flags & TDF_DETAILS))
2143 fprintf (dump_file, " Folded to: ");
2144 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2148 /* We only need to consider cases that can yield a gimple operand. */
2149 if (gimple_assign_single_p (stmt))
2150 rhs = gimple_assign_rhs1 (stmt);
2151 else if (gimple_code (stmt) == GIMPLE_GOTO)
2152 rhs = gimple_goto_dest (stmt);
2153 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2154 /* This should never be an ADDR_EXPR. */
2155 rhs = gimple_switch_index (stmt);
2157 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2158 recompute_tree_invariant_for_addr_expr (rhs);
2160 /* Constant/copy propagation above may change the set of
2161 virtual operands associated with this statement. Folding
2162 may remove the need for some virtual operands.
2164 Indicate we will need to rescan and rewrite the statement. */
2165 may_have_exposed_new_symbols = true;
2166 /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
2167 even if fold_stmt updated the stmt already and thus cleared
2168 gimple_modified_p flag on it. */
2169 modified_p = true;
2172   /* Check for redundant computations.  Do this optimization only
2173      for assignments and calls with no volatile ops, and for conditionals and switches.  */
2174 may_optimize_p = (!gimple_has_volatile_ops (stmt)
2175 && ((is_gimple_assign (stmt)
2176 && !gimple_rhs_has_side_effects (stmt))
2177 || (is_gimple_call (stmt)
2178 && gimple_call_lhs (stmt) != NULL_TREE
2179 && !gimple_rhs_has_side_effects (stmt))
2180 || gimple_code (stmt) == GIMPLE_COND
2181 || gimple_code (stmt) == GIMPLE_SWITCH));
2183 if (may_optimize_p)
2185 may_have_exposed_new_symbols |= eliminate_redundant_computations (&si);
2186 stmt = gsi_stmt (si);
2189 /* Record any additional equivalences created by this statement. */
2190 if (is_gimple_assign (stmt))
2191 record_equivalences_from_stmt (stmt, may_optimize_p);
2193 /* If STMT is a COND_EXPR and it was modified, then we may know
2194 where it goes. If that is the case, then mark the CFG as altered.
2196 This will cause us to later call remove_unreachable_blocks and
2197 cleanup_tree_cfg when it is safe to do so. It is not safe to
2198 clean things up here since removal of edges and such can trigger
2199 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2200 the manager.
2202 That's all fine and good, except that once SSA_NAMEs are released
2203 to the manager, we must not call create_ssa_name until all references
2204 to released SSA_NAMEs have been eliminated.
2206      All references to the deleted SSA_NAMEs cannot be eliminated until
2207      we remove unreachable blocks.
2209      We cannot remove unreachable blocks until after we have completed
2210      any queued jump threading.
2212      We cannot complete any queued jump threads until we have taken
2213 appropriate variables out of SSA form. Taking variables out of
2214 SSA form can call create_ssa_name and thus we lose.
2216 Ultimately I suspect we're going to need to change the interface
2217 into the SSA_NAME manager. */
2218 if (gimple_modified_p (stmt) || modified_p)
2220 tree val = NULL;
2222 if (gimple_code (stmt) == GIMPLE_COND)
2223 val = fold_binary_loc (gimple_location (stmt),
2224 gimple_cond_code (stmt), boolean_type_node,
2225 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
2226 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2227 val = gimple_switch_index (stmt);
2229 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2230 cfg_altered = true;
2232 /* If we simplified a statement in such a way as to be shown that it
2233 cannot trap, update the eh information and the cfg to match. */
2234 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
2236 bitmap_set_bit (need_eh_cleanup, bb->index);
2237 if (dump_file && (dump_flags & TDF_DETAILS))
2238 fprintf (dump_file, " Flagged to clear EH edges.\n");
2242 /* Queue the statement to be re-scanned after all the
2243 AVAIL_EXPRS have been processed. The change buffer stack for
2244 all the pushed statements will be processed when this queue
2245 is emptied. */
2246 if (may_have_exposed_new_symbols)
2247 VEC_safe_push (gimple, heap, stmts_to_rescan, gsi_stmt (si));
2250 /* Search for an existing instance of STMT in the AVAIL_EXPRS table.
2251 If found, return its LHS. Otherwise insert STMT in the table and
2252 return NULL_TREE.
2254    When an expression is first inserted in the table, it is also
2255    added to AVAIL_EXPRS_STACK, so that it can be removed when
2256 we finish processing this block and its children. */
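/* For instance (invented names): if  x_4 = b_2 + c_3  was entered
   earlier on the current dominator path, then looking up
   a_1 = b_2 + c_3  returns x_4.  Looking up an expression that is not
   in the table with INSERT set records it and returns NULL_TREE.  */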
2258 static tree
2259 lookup_avail_expr (gimple stmt, bool insert)
2261 void **slot;
2262 tree lhs;
2263 tree temp;
2264 struct expr_hash_elt *element = XNEW (struct expr_hash_elt);
2266 /* Get LHS of assignment or call, else NULL_TREE. */
2267 lhs = gimple_get_lhs (stmt);
2269 initialize_hash_element (stmt, lhs, element);
2271 if (dump_file && (dump_flags & TDF_DETAILS))
2273 fprintf (dump_file, "LKUP ");
2274 print_expr_hash_elt (dump_file, element);
2277 /* Don't bother remembering constant assignments and copy operations.
2278 Constants and copy operations are handled by the constant/copy propagator
2279 in optimize_stmt. */
2280 if (element->expr.kind == EXPR_SINGLE
2281 && (TREE_CODE (element->expr.ops.single.rhs) == SSA_NAME
2282 || is_gimple_min_invariant (element->expr.ops.single.rhs)))
2284 free (element);
2285 return NULL_TREE;
2288 /* Finally try to find the expression in the main expression hash table. */
2289 slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
2290 (insert ? INSERT : NO_INSERT));
2291 if (slot == NULL)
2293 free (element);
2294 return NULL_TREE;
2297 if (*slot == NULL)
2299 *slot = (void *) element;
2301 if (dump_file && (dump_flags & TDF_DETAILS))
2303 fprintf (dump_file, "2>>> ");
2304 print_expr_hash_elt (dump_file, element);
2307 VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
2308 return NULL_TREE;
2311 /* Extract the LHS of the assignment so that it can be used as the current
2312 definition of another variable. */
2313 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2315 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2316 use the value from the const_and_copies table. */
2317 if (TREE_CODE (lhs) == SSA_NAME)
2319 temp = SSA_NAME_VALUE (lhs);
2320 if (temp)
2321 lhs = temp;
2324 free (element);
2326 if (dump_file && (dump_flags & TDF_DETAILS))
2328 fprintf (dump_file, "FIND: ");
2329 print_generic_expr (dump_file, lhs, 0);
2330 fprintf (dump_file, "\n");
2333 return lhs;
2336 /* Hashing and equality functions for AVAIL_EXPRS. We compute a value number
2337 for expressions using the code of the expression and the SSA numbers of
2338 its operands. */
2340 static hashval_t
2341 avail_expr_hash (const void *p)
2343 gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
2344 const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
2345 tree vuse;
2346 hashval_t val = 0;
2348 val = iterative_hash_hashable_expr (expr, val);
2350 /* If the hash table entry is not associated with a statement, then we
2351 can just hash the expression and not worry about virtual operands
2352 and such. */
2353 if (!stmt)
2354 return val;
2356   /* Add the SSA version number of the vuse operand.  This is important
2357 because compound variables like arrays are not renamed in the
2358 operands. Rather, the rename is done on the virtual variable
2359 representing all the elements of the array. */
2360 if ((vuse = gimple_vuse (stmt)))
2361 val = iterative_hash_expr (vuse, val);
2363 return val;
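/* Hashing the VUSE matters because two textually identical loads
   separated by a store must not be treated as equal.  Hypothetically
   (invented names and dump notation):

     t_1 = *p_2;    # VUSE <.MEM_5>
     *p_2 = x_3;    # .MEM_6 = VDEF <.MEM_5>
     t_4 = *p_2;    # VUSE <.MEM_6>

   the two loads hash (and compare) differently because their VUSEs
   differ.  */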
2366 static hashval_t
2367 real_avail_expr_hash (const void *p)
2369 return ((const struct expr_hash_elt *)p)->hash;
2372 static int
2373 avail_expr_eq (const void *p1, const void *p2)
2375 gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
2376 const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
2377 const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
2378 gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
2379 const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
2380 const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;
2382 /* This case should apply only when removing entries from the table. */
2383 if (stamp1 == stamp2)
2384 return true;
2386 /* FIXME tuples:
2387      We add stmts to a hash table and then modify them. To detect the case
2388 that we modify a stmt and then search for it, we assume that the hash
2389 is always modified by that change.
2390 We have to fully check why this doesn't happen on trunk or rewrite
2391 this in a more reliable (and easier to understand) way. */
2392 if (((const struct expr_hash_elt *)p1)->hash
2393 != ((const struct expr_hash_elt *)p2)->hash)
2394 return false;
2396   /* In case of a collision, both RHSs have to be identical and have the
2397 same VUSE operands. */
2398 if (hashable_expr_equal_p (expr1, expr2)
2399 && types_compatible_p (expr1->type, expr2->type))
2401 /* Note that STMT1 and/or STMT2 may be NULL. */
2402 return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
2403 == (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
2406 return false;
2409 /* PHI-ONLY copy and constant propagation. This pass is meant to clean
2410 up degenerate PHIs created by or exposed by jump threading. */
2412 /* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
2413 NULL. */
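/* E.g. (hypothetical):  x_3 = PHI <x_1(2), x_1(3), x_3(4)>  is a
   degenerate; ignoring the self-referencing argument x_3, all the
   remaining arguments are x_1, so the PHI has the value x_1.  */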
2415 tree
2416 degenerate_phi_result (gimple phi)
2418 tree lhs = gimple_phi_result (phi);
2419 tree val = NULL;
2420 size_t i;
2422 /* Ignoring arguments which are the same as LHS, if all the remaining
2423 arguments are the same, then the PHI is a degenerate and has the
2424 value of that common argument. */
2425 for (i = 0; i < gimple_phi_num_args (phi); i++)
2427 tree arg = gimple_phi_arg_def (phi, i);
2429 if (arg == lhs)
2430 continue;
2431 else if (!val)
2432 val = arg;
2433 else if (!operand_equal_p (arg, val, 0))
2434 break;
2436 return (i == gimple_phi_num_args (phi) ? val : NULL);
2439 /* Given a statement STMT, which is either a PHI node or an assignment,
2440 remove it from the IL. */
2442 static void
2443 remove_stmt_or_phi (gimple stmt)
2445 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2447 if (gimple_code (stmt) == GIMPLE_PHI)
2448 remove_phi_node (&gsi, true);
2449 else
2451 gsi_remove (&gsi, true);
2452 release_defs (stmt);
2456 /* Given a statement STMT, which is either a PHI node or an assignment,
2457    return the "rhs" of the node; in the case of a non-degenerate
2458    PHI, NULL is returned.  */
2460 static tree
2461 get_rhs_or_phi_arg (gimple stmt)
2463 if (gimple_code (stmt) == GIMPLE_PHI)
2464 return degenerate_phi_result (stmt);
2465 else if (gimple_assign_single_p (stmt))
2466 return gimple_assign_rhs1 (stmt);
2467 else
2468 gcc_unreachable ();
2472 /* Given a statement STMT, which is either a PHI node or an assignment,
2473 return the "lhs" of the node. */
2475 static tree
2476 get_lhs_or_phi_result (gimple stmt)
2478 if (gimple_code (stmt) == GIMPLE_PHI)
2479 return gimple_phi_result (stmt);
2480 else if (is_gimple_assign (stmt))
2481 return gimple_assign_lhs (stmt);
2482 else
2483 gcc_unreachable ();
2486 /* Propagate RHS into all uses of LHS (when possible).
2488 RHS and LHS are derived from STMT, which is passed in solely so
2489 that we can remove it if propagation is successful.
2491 When propagating into a PHI node or into a statement which turns
2492 into a trivial copy or constant initialization, set the
2493 appropriate bit in INTERESTING_NAMEs so that we will visit those
2494 nodes as well in an effort to pick up secondary optimization
2495 opportunities. */
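/* For instance (invented names), given STMT  x_1 = y_2,  each use of
   x_1 is rewritten to use y_2.  If every non-debug use is rewritten
   successfully, STMT itself becomes dead and is removed.  */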
2497 static void
2498 propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
2500 /* First verify that propagation is valid and isn't going to move a
2501 loop variant variable outside its loop. */
2502 if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
2503 && (TREE_CODE (rhs) != SSA_NAME
2504 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2505 && may_propagate_copy (lhs, rhs)
2506 && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
2508 use_operand_p use_p;
2509 imm_use_iterator iter;
2510 gimple use_stmt;
2511 bool all = true;
2513 /* Dump details. */
2514 if (dump_file && (dump_flags & TDF_DETAILS))
2516 fprintf (dump_file, " Replacing '");
2517 print_generic_expr (dump_file, lhs, dump_flags);
2518 fprintf (dump_file, "' with %s '",
2519 (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
2520 print_generic_expr (dump_file, rhs, dump_flags);
2521 fprintf (dump_file, "'\n");
2524 /* Walk over every use of LHS and try to replace the use with RHS.
2525 At this point the only reason why such a propagation would not
2526 be successful would be if the use occurs in an ASM_EXPR. */
2527 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2529 /* Leave debug stmts alone. If we succeed in propagating
2530 all non-debug uses, we'll drop the DEF, and propagation
2531 into debug stmts will occur then. */
2532 if (gimple_debug_bind_p (use_stmt))
2533 continue;
2535 /* It's not always safe to propagate into an ASM_EXPR. */
2536 if (gimple_code (use_stmt) == GIMPLE_ASM
2537 && ! may_propagate_copy_into_asm (lhs))
2539 all = false;
2540 continue;
2543 /* Dump details. */
2544 if (dump_file && (dump_flags & TDF_DETAILS))
2546 fprintf (dump_file, " Original statement:");
2547 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2550 /* Propagate the RHS into this use of the LHS. */
2551 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2552 propagate_value (use_p, rhs);
2554 /* Special cases to avoid useless calls into the folding
2555 routines, operand scanning, etc.
2557 First, propagation into a PHI may cause the PHI to become
2558 a degenerate, so mark the PHI as interesting. No other
2559 actions are necessary.
2561 Second, if we're propagating a virtual operand and the
2562 propagation does not change the underlying _DECL node for
2563 the virtual operand, then no further actions are necessary. */
2564 if (gimple_code (use_stmt) == GIMPLE_PHI
2565 || (! is_gimple_reg (lhs)
2566 && TREE_CODE (rhs) == SSA_NAME
2567 && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
2569 /* Dump details. */
2570 if (dump_file && (dump_flags & TDF_DETAILS))
2572 fprintf (dump_file, " Updated statement:");
2573 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2576 /* Propagation into a PHI may expose new degenerate PHIs,
2577 so mark the result of the PHI as interesting. */
2578 if (gimple_code (use_stmt) == GIMPLE_PHI)
2580 tree result = get_lhs_or_phi_result (use_stmt);
2581 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2584 continue;
2587 /* From this point onward we are propagating into a
2588 real statement. Folding may (or may not) be possible,
2589 we may expose new operands, expose dead EH edges,
2590 etc. */
2591 /* NOTE tuples. In the tuples world, fold_stmt_inplace
2592 cannot fold a call that simplifies to a constant,
2593 because the GIMPLE_CALL must be replaced by a
2594 GIMPLE_ASSIGN, and there is no way to effect such a
2595 transformation in-place. We might want to consider
2596 using the more general fold_stmt here. */
2597 fold_stmt_inplace (use_stmt);
2599 /* Sometimes propagation can expose new operands to the
2600 renamer. */
2601 update_stmt (use_stmt);
2603 /* Dump details. */
2604 if (dump_file && (dump_flags & TDF_DETAILS))
2606 fprintf (dump_file, " Updated statement:");
2607 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2610 /* If we replaced a variable index with a constant, then
2611 we would need to update the invariant flag for ADDR_EXPRs. */
2612 if (gimple_assign_single_p (use_stmt)
2613 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
2614 recompute_tree_invariant_for_addr_expr
2615 (gimple_assign_rhs1 (use_stmt));
2617 /* If we cleaned up EH information from the statement,
2618 mark its containing block as needing EH cleanups. */
2619 if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
2621 bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
2622 if (dump_file && (dump_flags & TDF_DETAILS))
2623 fprintf (dump_file, " Flagged to clear EH edges.\n");
2626 /* Propagation may expose new trivial copy/constant propagation
2627 opportunities. */
2628 if (gimple_assign_single_p (use_stmt)
2629 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
2630 && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
2631 || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
2633 tree result = get_lhs_or_phi_result (use_stmt);
2634 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2637 /* Propagation into these nodes may make certain edges in
2638          the CFG unexecutable.  We want to identify them, as PHI nodes
2639          at the destination of those unexecutable edges may become
2640          degenerate.  */
2641 else if (gimple_code (use_stmt) == GIMPLE_COND
2642 || gimple_code (use_stmt) == GIMPLE_SWITCH
2643 || gimple_code (use_stmt) == GIMPLE_GOTO)
2645 tree val;
2647 if (gimple_code (use_stmt) == GIMPLE_COND)
2648 val = fold_binary_loc (gimple_location (use_stmt),
2649 gimple_cond_code (use_stmt),
2650 boolean_type_node,
2651 gimple_cond_lhs (use_stmt),
2652 gimple_cond_rhs (use_stmt));
2653 else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
2654 val = gimple_switch_index (use_stmt);
2655 else
2656 val = gimple_goto_dest (use_stmt);
2658 if (val && is_gimple_min_invariant (val))
2660 basic_block bb = gimple_bb (use_stmt);
2661 edge te = find_taken_edge (bb, val);
2662 edge_iterator ei;
2663 edge e;
2664 gimple_stmt_iterator gsi, psi;
2666 /* Remove all outgoing edges except TE. */
2667 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
2669 if (e != te)
2671 /* Mark all the PHI nodes at the destination of
2672 the unexecutable edge as interesting. */
2673 for (psi = gsi_start_phis (e->dest);
2674 !gsi_end_p (psi);
2675 gsi_next (&psi))
2677 gimple phi = gsi_stmt (psi);
2679 tree result = gimple_phi_result (phi);
2680 int version = SSA_NAME_VERSION (result);
2682 bitmap_set_bit (interesting_names, version);
2685 te->probability += e->probability;
2687 te->count += e->count;
2688 remove_edge (e);
2689 cfg_altered = true;
2691 else
2692 ei_next (&ei);
2695 gsi = gsi_last_bb (gimple_bb (use_stmt));
2696 gsi_remove (&gsi, true);
2698 /* And fixup the flags on the single remaining edge. */
2699 te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
2700 te->flags &= ~EDGE_ABNORMAL;
2701 te->flags |= EDGE_FALLTHRU;
2702 if (te->probability > REG_BR_PROB_BASE)
2703 te->probability = REG_BR_PROB_BASE;
2708 /* Ensure there is nothing else to do. */
2709 gcc_assert (!all || has_zero_uses (lhs));
2711 /* If we were able to propagate away all uses of LHS, then
2712 we can remove STMT. */
2713 if (all)
2714 remove_stmt_or_phi (stmt);
2718 /* STMT is either a PHI node (potentially a degenerate PHI node) or
2719 a statement that is a trivial copy or constant initialization.
2721    Attempt to eliminate STMT by propagating its RHS into all uses of
2722 its LHS. This may in turn set new bits in INTERESTING_NAMES
2723 for nodes we want to revisit later.
2725 All exit paths should clear INTERESTING_NAMES for the result
2726 of STMT. */
2728 static void
2729 eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
2731 tree lhs = get_lhs_or_phi_result (stmt);
2732 tree rhs;
2733 int version = SSA_NAME_VERSION (lhs);
2735 /* If the LHS of this statement or PHI has no uses, then we can
2736 just eliminate it. This can occur if, for example, the PHI
2737 was created by block duplication due to threading and its only
2738 use was in the conditional at the end of the block which was
2739 deleted. */
2740 if (has_zero_uses (lhs))
2742 bitmap_clear_bit (interesting_names, version);
2743 remove_stmt_or_phi (stmt);
2744 return;
2747 /* Get the RHS of the assignment or PHI node if the PHI is a
2748 degenerate. */
2749 rhs = get_rhs_or_phi_arg (stmt);
2750 if (!rhs)
2752 bitmap_clear_bit (interesting_names, version);
2753 return;
2756 propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
2758 /* Note that STMT may well have been deleted by now, so do
2759      not access it; instead, use the saved version # to clear
2760      STMT's entry in the worklist.  */
2761 bitmap_clear_bit (interesting_names, version);
2764 /* The first phase in degenerate PHI elimination.
2766 Eliminate the degenerate PHIs in BB, then recurse on the
2767 dominator children of BB. */
2769 static void
2770 eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
2772 gimple_stmt_iterator gsi;
2773 basic_block son;
2775 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2777 gimple phi = gsi_stmt (gsi);
2779 eliminate_const_or_copy (phi, interesting_names);
2782 /* Recurse into the dominator children of BB. */
2783 for (son = first_dom_son (CDI_DOMINATORS, bb);
2784 son;
2785 son = next_dom_son (CDI_DOMINATORS, son))
2786 eliminate_degenerate_phis_1 (son, interesting_names);
2790 /* A very simple pass to eliminate degenerate PHI nodes from the
2791 IL. This is meant to be fast enough to be able to be run several
2792 times in the optimization pipeline.
2794 Certain optimizations, particularly those which duplicate blocks
2795    or remove edges from the CFG, can create or expose PHIs which are
2796 trivial copies or constant initializations.
2798 While we could pick up these optimizations in DOM or with the
2799 combination of copy-prop and CCP, those solutions are far too
2800 heavy-weight for our needs.
2802 This implementation has two phases so that we can efficiently
2803 eliminate the first order degenerate PHIs and second order
2804 degenerate PHIs.
2806 The first phase performs a dominator walk to identify and eliminate
2807 the vast majority of the degenerate PHIs. When a degenerate PHI
2808    is identified and eliminated, any affected statements or PHIs
2809 are put on a worklist.
2811 The second phase eliminates degenerate PHIs and trivial copies
2812 or constant initializations using the worklist. This is how we
2813 pick up the secondary optimization opportunities with minimal
2814 cost. */
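/* A hypothetical second-order case (invented names): eliminating

     x_2 = PHI <x_1(3), x_1(4)>

   by replacing x_2 with x_1 may turn  y_5 = PHI <x_2(5), x_1(6)>
   into  y_5 = PHI <x_1(5), x_1(6)>,  which is itself now degenerate;
   the worklist phase of this pass picks it up.  */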
2816 static unsigned int
2817 eliminate_degenerate_phis (void)
2819 bitmap interesting_names;
2820 bitmap interesting_names1;
2822   /* Bitmap of blocks which need EH information updated.  We cannot
2823 update it on-the-fly as doing so invalidates the dominator tree. */
2824 need_eh_cleanup = BITMAP_ALLOC (NULL);
2826 /* INTERESTING_NAMES is effectively our worklist, indexed by
2827 SSA_NAME_VERSION.
2829 A set bit indicates that the statement or PHI node which
2830 defines the SSA_NAME should be (re)examined to determine if
2831 it has become a degenerate PHI or trivial const/copy propagation
2832 opportunity.
2834      Experiments have shown we generally get better compilation
2835 time behavior with bitmaps rather than sbitmaps. */
2836 interesting_names = BITMAP_ALLOC (NULL);
2837 interesting_names1 = BITMAP_ALLOC (NULL);
2839 calculate_dominance_info (CDI_DOMINATORS);
2840 cfg_altered = false;
2842 /* First phase. Eliminate degenerate PHIs via a dominator
2843 walk of the CFG.
2845 Experiments have indicated that we generally get better
2846 compile-time behavior by visiting blocks in the first
2847 phase in dominator order. Presumably this is because walking
2848 in dominator order leaves fewer PHIs for later examination
2849 by the worklist phase. */
2850 eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
2852 /* Second phase. Eliminate second order degenerate PHIs as well
2853 as trivial copies or constant initializations identified by
2854 the first phase or this phase. Basically we keep iterating
2855 until our set of INTERESTING_NAMEs is empty. */
2856 while (!bitmap_empty_p (interesting_names))
2858 unsigned int i;
2859 bitmap_iterator bi;
2861 /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
2862 changed during the loop. Copy it to another bitmap and
2863 use that. */
2864 bitmap_copy (interesting_names1, interesting_names);
2866 EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
2868 tree name = ssa_name (i);
2870 /* Ignore SSA_NAMEs that have been released because
2871 their defining statement was deleted (unreachable). */
2872 if (name)
2873 eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
2874 interesting_names);
2878 if (cfg_altered)
2879 free_dominance_info (CDI_DOMINATORS);
2881 /* Propagation of const and copies may make some EH edges dead. Purge
2882 such edges from the CFG as needed. */
2883 if (!bitmap_empty_p (need_eh_cleanup))
2885 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
2886 BITMAP_FREE (need_eh_cleanup);
2889 BITMAP_FREE (interesting_names);
2890 BITMAP_FREE (interesting_names1);
2891 return 0;
2894 struct gimple_opt_pass pass_phi_only_cprop =
2897 GIMPLE_PASS,
2898 "phicprop", /* name */
2899 gate_dominator, /* gate */
2900 eliminate_degenerate_phis, /* execute */
2901 NULL, /* sub */
2902 NULL, /* next */
2903 0, /* static_pass_number */
2904 TV_TREE_PHI_CPROP, /* tv_id */
2905 PROP_cfg | PROP_ssa, /* properties_required */
2906 0, /* properties_provided */
2907 0, /* properties_destroyed */
2908 0, /* todo_flags_start */
2909 TODO_cleanup_cfg
2910 | TODO_dump_func
2911 | TODO_ggc_collect
2912 | TODO_verify_ssa
2913 | TODO_verify_stmts
2914 | TODO_update_ssa /* todo_flags_finish */