/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "real.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"
/* This file implements optimizations on the dominator tree.  */

/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */
enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_CALL
};
struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0; tree opnd1; } binary;
    struct { tree fn; bool pure; size_t nargs; tree *args; } call;
  } ops;
};
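
/* Purely illustrative (the SSA names below are made up): an assignment
   such as "x_1 = a_2 + b_3" would be described by a hashable_expr with
   kind == EXPR_BINARY, type == TREE_TYPE (x_1), ops.binary.op ==
   PLUS_EXPR, and ops.binary.opnd0/opnd1 pointing at a_2 and b_3.  A
   call "x_1 = foo (a_2)" would instead use EXPR_CALL with nargs == 1.  */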
/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

struct cond_equivalence
{
  struct hashable_expr cond;
  tree value;
};
/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */
struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  The number of recorded conditions can vary, but
     can be determined by the condition's code.  So we have an array
     and its maximum index rather than use a varray.  */
  struct cond_equivalence *cond_equivalences;
  unsigned int max_cond_equivalences;
};
/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;
/* Stack of available expressions in AVAIL_EXPRS.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;
DEF_VEC_P(expr_hash_elt_t);
DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);

static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;
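
/* A simplified sketch of the unwinding protocol (not executed here; see
   dom_opt_enter_block and remove_local_expressions_from_table for the
   real code):

       VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
       ... process the block, pushing each new table entry ...
       while ((victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack)))
         htab_remove_elt_with_hash (avail_exprs, victim, victim->hash);  */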
/* Stack of statements we need to rescan during finalization for newly
   exposed variables.

   Statement rescanning must occur after the current block's available
   expressions are removed from AVAIL_EXPRS.  Else we may change the
   hash code for an expression and be unable to find/remove it from
   AVAIL_EXPRS.  */
static VEC(gimple,heap) *stmts_to_rescan;
/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};
/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;
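
/* Illustrative only: record_const_or_copy_1 pushes each pair in the
   order <previous value, destination>, so restore_vars_to_original_value
   pops the destination first and then the value to restore:

       VEC_quick_push (tree, const_and_copies_stack, prev_x);
       VEC_quick_push (tree, const_and_copies_stack, x);  */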
/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;
/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (struct cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static bool eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_leave_block (struct dom_walk_data *, basic_block);
static void dom_opt_enter_block (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = strip_nv (gimple_assign_rhs_code (stmt));

      expr->type = NULL_TREE;

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn = gimple_call_fn (stmt);

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = (tree *) xcalloc (nargs, sizeof (tree));
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}
/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Compare two hashable_expr structures for equivalence.
   They are considered equivalent when the expressions
   they denote must necessarily be equal.  The logic is intended
   to follow that of operand_equal_p in fold-const.c.  */
static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (! operand_equal_p (expr0->ops.call.fn,
                               expr1->ops.call.fn, 0))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */

static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;

        val = iterative_hash_object (code, val);
        val = iterative_hash_expr (expr->ops.call.fn, val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}
/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;

        print_generic_expr (stream, element->expr.ops.call.fn, 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, "          ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}
/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);

  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);

  free (element);
}
/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}
/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              if (edge_info->cond_equivalences)
                free (edge_info->cond_equivalences);
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}
/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq,
                             free_expr_hash_elt);
  avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  stmts_to_rescan = VEC_alloc (gimple, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dom_opt_enter_block;
  walk_data.after_dom_children = dom_opt_leave_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (single_succ_p (bb)
              && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            {
              bitmap_clear_bit (need_eh_cleanup, i);
              bitmap_set_bit (need_eh_cleanup, single_succ (bb)->index);
            }
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);
  VEC_free (gimple, heap, stmts_to_rescan);

  /* Free the value-handle array.  */
  threadedge_finalize_values ();
  ssa_name_values = NULL;

  return 0;
}
static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}
struct gimple_opt_pass pass_dominator =
{
 {
  GIMPLE_PASS,
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_cleanup_cfg
    | TODO_verify_ssa                   /* todo_flags_finish */
 }
};
/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */

static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}
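
/* For instance (illustrative only), a condition such as "if (3 > a_1)"
   whose operands are deemed profitable to swap is rewritten in place as
   "if (a_1 < 3)"; both forms then hash and compare identically in the
   available expression table.  */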
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Remove the expressions made available in the current block from
   AVAIL_EXPRS, popping entries off AVAIL_EXPRS_STACK until the NULL
   marker for this block is reached.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
    {
      struct expr_hash_elt element;
      expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);

      if (victim == NULL)
        break;

      element = *victim;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, &element);
        }

      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
    }
}
/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = VEC_pop (tree, const_and_copies_stack);
      set_ssa_name_value (dest, prev_value);
    }
}
/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}
/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

static void
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  if (! walk_data->global_data)
    {
      gimple dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);
      walk_data->global_data = dummy_cond;
    }

  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
}
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */
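
/* For example (SSA names are made up for illustration):

       x_4 = PHI <y_2, y_2, y_2>

   has every alternative equal to y_2, so x_4 can be treated as a copy
   of y_2, whereas "x_4 = PHI <y_2, z_3>" creates no equivalence.  */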
static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}
/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}
/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          struct cond_equivalence *cond_equivalences
            = edge_info->cond_equivalences;

          if (lhs)
            record_equality (lhs, rhs);

          if (cond_equivalences)
            for (i = 0; i < edge_info->max_cond_equivalences; i++)
              record_cond (&cond_equivalences[i]);
        }
    }
}
/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements:                   %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, "    avail_exprs: ");
  htab_statistics (file, avail_exprs);
}


/* Dump SSA statistics on stderr.  */

void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}


/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}
/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (struct cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
    }
  else
    free (element);
}
/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           struct cond_equivalence *p)
{
  struct hashable_expr *cond = &p->cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  p->value = boolean_true_node;
}
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 6;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 6);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[5]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 3;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 3);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[2]);
        }
      else
        {
          edge_info->max_cond_equivalences = 2;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 5;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 5);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNORDERED_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 8);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[5]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[7]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNEQ_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case LTGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    default:
      edge_info->max_cond_equivalences = 2;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &edge_info->cond_equivalences[0].cond);
  edge_info->cond_equivalences[0].value = boolean_true_node;

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &edge_info->cond_equivalences[1].cond);
  edge_info->cond_equivalences[1].value = boolean_false_node;
}
/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  set_ssa_name_value (x, y);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}
/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

static int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}
/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

static bool
simple_iv_increment_p (gimple stmt)
{
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  if (!PLUS_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
      && !MINUS_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
    return false;

  preinc = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */
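
/* Illustrative example (made-up SSA names): if CONST_AND_COPIES
   currently maps a_1 -> 5 and a successor block starts with
   "x_3 = PHI <a_1 (e), ...>", the argument on edge E is rewritten so
   the PHI becomes "x_3 = PHI <5 (e), ...>", provided
   may_propagate_copy allows the replacement.  */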
static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }
    }
}
/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert (TREE_TYPE (index),
                                             CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue (cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (code == NE_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }
          else if (TREE_CODE (op0) == SSA_NAME
                   && (is_gimple_min_invariant (op1)
                       || TREE_CODE (op1) == SSA_NAME))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue (cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (TREE_CODE (cond) == NE_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }

          /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
        }
    }
}
static void
dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                     basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = (struct edge_info *) true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences
                = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          dom_thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
        }

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
          edge_info = (struct edge_info *) false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences
                = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          /* Now thread the edge.  */
          dom_thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();

  /* If we queued any statements to rescan in this block, then
     go ahead and rescan them now.  */
  while (VEC_length (gimple, stmts_to_rescan) > 0)
    {
      gimple stmt = VEC_last (gimple, stmts_to_rescan);
      basic_block stmt_bb = gimple_bb (stmt);

      if (stmt_bb != bb)
        break;

      VEC_pop (gimple, stmts_to_rescan);
      update_stmt (stmt);
    }
}
/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.  */
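
/* A hypothetical before/after, with made-up SSA names:

       a_5 = b_1 + c_2;
       ...
       d_9 = b_1 + c_2;   is rewritten as   d_9 = a_5;

   The second computation hits the hash table entry recorded for the
   first and is replaced by its cached LHS.  */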
static bool
eliminate_redundant_computations (gimple_stmt_iterator* gsi)
{
  tree expr_type;
  tree cached_lhs;
  bool insert = true;
  bool retval = false;
  bool assigns_var_p = false;

  gimple stmt = gsi_stmt (*gsi);

  tree def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but cannot
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || gimple_vdef (stmt)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    expr_type = TREE_TYPE (gimple_switch_index (stmt));
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return false;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
    {
#if defined ENABLE_CHECKING
      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
                  || is_gimple_min_invariant (cached_lhs));
#endif

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced redundant expr '");
          print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

      if (TREE_CODE (cached_lhs) == ADDR_EXPR
          || (POINTER_TYPE_P (expr_type)
              && is_gimple_min_invariant (cached_lhs)))
        retval = true;

      if (assigns_var_p
          && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
    }

  return retval;
}
/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.  */
/* We handle only very simple copy equivalences here.  The heavy
   lifting is done by eliminate_redundant_computations.  */
static void
record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
{
  tree lhs;
  enum tree_code lhs_code;

  gcc_assert (is_gimple_assign (stmt));

  lhs = gimple_assign_lhs (stmt);
  lhs_code = TREE_CODE (lhs);

  if (lhs_code == SSA_NAME
      && gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      /* If the RHS of the assignment is a constant or another variable that
         may be propagated, register it in the CONST_AND_COPIES table.  We
         do not need to record unwind data for this, since this is a true
         assignment and not an equivalence inferred from a comparison.  All
         uses of this ssa name are dominated by this assignment, so unwinding
         just costs time and space.  */
      if (may_optimize_p
          && (TREE_CODE (rhs) == SSA_NAME
              || is_gimple_min_invariant (rhs)))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "==== ASGN ");
              print_generic_expr (dump_file, lhs, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, rhs, 0);
              fprintf (dump_file, "\n");
            }

          set_ssa_name_value (lhs, rhs);
        }
    }
1937 /* A memory store, even an aliased store, creates a useful
1938 equivalence. By exchanging the LHS and RHS, creating suitable
1939 vops and recording the result in the available expression table,
1940 we may be able to expose more redundant loads. */
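/* As an illustration (names hypothetical), given the store

     *p_1 = x_2;

   we build the artificial statement "x_2 = *p_1" and enter it in
   the available expression table, so that a later load

     y_3 = *p_1;

   seeing the same virtual operands can be rewritten as
   "y_3 = x_2".  */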
1941 if (!gimple_has_volatile_ops (stmt)
1942 && gimple_references_memory_p (stmt)
1943 && gimple_assign_single_p (stmt)
1944 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1945 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
1946 && !is_gimple_reg (lhs))
1948 tree rhs = gimple_assign_rhs1 (stmt);
1949 gimple new_stmt;
1951 /* Build a new statement with the RHS and LHS exchanged. */
1952 if (TREE_CODE (rhs) == SSA_NAME)
1954 /* NOTE tuples. The call to gimple_build_assign below replaced
1955 a call to build_gimple_modify_stmt, which did not set the
1956 SSA_NAME_DEF_STMT on the LHS of the assignment. Doing so
1957 may cause an SSA validation failure, as the LHS may be a
1958 default-initialized name and should have no definition. I'm
1959 a bit dubious of this, as the artificial statement that we
1960 generate here may in fact be ill-formed, but it is simply
1961 used as an internal device in this pass, and never becomes
1962 part of the CFG. */
1963 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
1964 new_stmt = gimple_build_assign (rhs, lhs);
1965 SSA_NAME_DEF_STMT (rhs) = defstmt;
1967 else
1968 new_stmt = gimple_build_assign (rhs, lhs);
1970 gimple_set_vuse (new_stmt, gimple_vdef (stmt));
1972 /* Finally enter the statement into the available expression
1973 table. */
1974 lookup_avail_expr (new_stmt, true);
1978 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
1979 CONST_AND_COPIES. */
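/* For example (names hypothetical), once "x_1 = 5" has been
   recorded, the use of x_1 in "y_2 = x_1 + 1" is replaced to give
   "y_2 = 5 + 1", which the folding done later in optimize_stmt can
   reduce to "y_2 = 6".  */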
1981 static bool
1982 cprop_operand (gimple stmt, use_operand_p op_p)
1984 bool may_have_exposed_new_symbols = false;
1985 tree val;
1986 tree op = USE_FROM_PTR (op_p);
1988 /* If the operand has a known constant value or it is known to be a
1989 copy of some other variable, use the value or copy stored in
1990 CONST_AND_COPIES. */
1991 val = SSA_NAME_VALUE (op);
1992 if (val && val != op)
1994 /* Do not change the base variable in the virtual operand
1995 tables. That would make it impossible to reconstruct
1996 the renamed virtual operand if we later modify this
1997 statement. Also only allow the new value to be an SSA_NAME
1998 for propagation into virtual operands. */
1999 if (!is_gimple_reg (op)
2000 && (TREE_CODE (val) != SSA_NAME
2001 || is_gimple_reg (val)
2002 || get_virtual_var (val) != get_virtual_var (op)))
2003 return false;
2005 /* Do not replace hard register operands in asm statements. */
2006 if (gimple_code (stmt) == GIMPLE_ASM
2007 && !may_propagate_copy_into_asm (op))
2008 return false;
2010 /* Certain operands are not allowed to be copy propagated due
2011 to their interaction with exception handling and some GCC
2012 extensions. */
2013 if (!may_propagate_copy (op, val))
2014 return false;
2016 /* Do not propagate addresses that point to volatiles into memory
2017 stmts without volatile operands. */
2018 if (POINTER_TYPE_P (TREE_TYPE (val))
2019 && TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (val)))
2020 && gimple_has_mem_ops (stmt)
2021 && !gimple_has_volatile_ops (stmt))
2022 return false;
2024 /* Do not propagate copies if the propagated value is at a deeper loop
2025 depth than the propagatee. Otherwise, this may move loop variant
2026 variables outside of their loops and prevent coalescing
2027 opportunities. If the value was loop invariant, it will be hoisted
2028 by LICM and exposed for copy propagation. */
2029 if (loop_depth_of_name (val) > loop_depth_of_name (op))
2030 return false;
2032 /* Dump details. */
2033 if (dump_file && (dump_flags & TDF_DETAILS))
2035 fprintf (dump_file, " Replaced '");
2036 print_generic_expr (dump_file, op, dump_flags);
2037 fprintf (dump_file, "' with %s '",
2038 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2039 print_generic_expr (dump_file, val, dump_flags);
2040 fprintf (dump_file, "'\n");
2043 /* If VAL is an ADDR_EXPR or a constant of pointer type, note
2044 that we may have exposed a new symbol for SSA renaming. */
2045 if (TREE_CODE (val) == ADDR_EXPR
2046 || (POINTER_TYPE_P (TREE_TYPE (op))
2047 && is_gimple_min_invariant (val)))
2048 may_have_exposed_new_symbols = true;
2050 if (TREE_CODE (val) != SSA_NAME)
2051 opt_stats.num_const_prop++;
2052 else
2053 opt_stats.num_copy_prop++;
2055 propagate_value (op_p, val);
2057 /* And note that we modified this statement. This is now
2058 safe, even if we changed virtual operands, since we will
2059 rescan the statement and rewrite its operands again. */
2060 gimple_set_modified (stmt, true);
2062 return may_have_exposed_new_symbols;
2065 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2066 known value for that SSA_NAME (or NULL if no value is known).
2068 Propagate values from CONST_AND_COPIES into the uses, vuses and
2069 vdef_ops of STMT. */
2071 static bool
2072 cprop_into_stmt (gimple stmt)
2074 bool may_have_exposed_new_symbols = false;
2075 use_operand_p op_p;
2076 ssa_op_iter iter;
2078 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2080 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2081 may_have_exposed_new_symbols |= cprop_operand (stmt, op_p);
2084 return may_have_exposed_new_symbols;
2087 /* Optimize the statement pointed to by iterator SI.
2089 We try to perform some simplistic global redundancy elimination and
2090 constant propagation:
2092 1- To detect global redundancy, we keep track of expressions that have
2093 been computed in this block and its dominators. If we find that the
2094 same expression is computed more than once, we eliminate repeated
2095 computations by using the target of the first one.
2097 2- Constant values and copy assignments. This is used to do very
2098 simplistic constant and copy propagation. When a constant or copy
2099 assignment is found, we map the value on the RHS of the assignment to
2100 the variable in the LHS in the CONST_AND_COPIES table. */
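/* As a sketch (SSA names hypothetical), both transformations act
   on code such as

     a_1 = b_2 + c_3;
     d_4 = b_2 + c_3;    <-- redundant; rewritten as d_4 = a_1
     e_5 = 7;
     f_6 = e_5 + 1;      <-- use of e_5 replaced by 7, then folded  */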
2102 static void
2103 optimize_stmt (basic_block bb, gimple_stmt_iterator si)
2105 gimple stmt, old_stmt;
2106 bool may_optimize_p;
2107 bool may_have_exposed_new_symbols;
2108 bool modified_p = false;
2110 old_stmt = stmt = gsi_stmt (si);
2112 if (gimple_code (stmt) == GIMPLE_COND)
2113 canonicalize_comparison (stmt);
2115 update_stmt_if_modified (stmt);
2116 opt_stats.num_stmts++;
2118 if (dump_file && (dump_flags & TDF_DETAILS))
2120 fprintf (dump_file, "Optimizing statement ");
2121 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2124 /* Const/copy propagate into USES, VUSES and the RHS of VDEFs. */
2125 may_have_exposed_new_symbols = cprop_into_stmt (stmt);
2127 /* If the statement has been modified with constant replacements,
2128 fold its RHS before checking for redundant computations. */
2129 if (gimple_modified_p (stmt))
2131 tree rhs = NULL;
2133 /* Try to fold the statement making sure that STMT is kept
2134 up to date. */
2135 if (fold_stmt (&si))
2137 stmt = gsi_stmt (si);
2139 if (dump_file && (dump_flags & TDF_DETAILS))
2141 fprintf (dump_file, " Folded to: ");
2142 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2146 /* We only need to consider cases that can yield a gimple operand. */
2147 if (gimple_assign_single_p (stmt))
2148 rhs = gimple_assign_rhs1 (stmt);
2149 else if (gimple_code (stmt) == GIMPLE_GOTO)
2150 rhs = gimple_goto_dest (stmt);
2151 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2152 /* This should never be an ADDR_EXPR. */
2153 rhs = gimple_switch_index (stmt);
2155 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2156 recompute_tree_invariant_for_addr_expr (rhs);
2158 /* Constant/copy propagation above may change the set of
2159 virtual operands associated with this statement. Folding
2160 may remove the need for some virtual operands.
2162 Indicate we will need to rescan and rewrite the statement. */
2163 may_have_exposed_new_symbols = true;
2164 /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
2165 even if fold_stmt updated the stmt already and thus cleared the
2166 gimple_modified_p flag on it. */
2167 modified_p = true;
2170 /* Check for redundant computations. Do this only for assignments,
2171 calls, conditionals and switches with no volatile operands. */
2172 may_optimize_p = (!gimple_has_volatile_ops (stmt)
2173 && ((is_gimple_assign (stmt)
2174 && !gimple_rhs_has_side_effects (stmt))
2175 || (is_gimple_call (stmt)
2176 && gimple_call_lhs (stmt) != NULL_TREE
2177 && !gimple_rhs_has_side_effects (stmt))
2178 || gimple_code (stmt) == GIMPLE_COND
2179 || gimple_code (stmt) == GIMPLE_SWITCH));
2181 if (may_optimize_p)
2183 may_have_exposed_new_symbols |= eliminate_redundant_computations (&si);
2184 stmt = gsi_stmt (si);
2187 /* Record any additional equivalences created by this statement. */
2188 if (is_gimple_assign (stmt))
2189 record_equivalences_from_stmt (stmt, may_optimize_p);
2191 /* If STMT is a GIMPLE_COND or GIMPLE_SWITCH and it was modified, then we
2192 may know where it goes. If that is the case, then mark the CFG as altered.
2194 This will cause us to later call remove_unreachable_blocks and
2195 cleanup_tree_cfg when it is safe to do so. It is not safe to
2196 clean things up here since removal of edges and such can trigger
2197 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2198 the manager.
2200 That's all fine and good, except that once SSA_NAMEs are released
2201 to the manager, we must not call create_ssa_name until all references
2202 to released SSA_NAMEs have been eliminated.
2204 All references to the deleted SSA_NAMEs cannot be eliminated until
2205 we remove unreachable blocks.
2207 We cannot remove unreachable blocks until after we have completed
2208 any queued jump threading.
2210 We cannot complete any queued jump threads until we have taken
2211 the appropriate variables out of SSA form. Taking variables out of
2212 SSA form can call create_ssa_name, and thus we lose.
2214 Ultimately I suspect we're going to need to change the interface
2215 into the SSA_NAME manager. */
2216 if (gimple_modified_p (stmt) || modified_p)
2218 tree val = NULL;
2220 if (gimple_code (stmt) == GIMPLE_COND)
2221 val = fold_binary (gimple_cond_code (stmt), boolean_type_node,
2222 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
2223 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2224 val = gimple_switch_index (stmt);
2226 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2227 cfg_altered = true;
2229 /* If we simplified a statement in such a way that it can be shown
2230 not to trap, update the EH information and the CFG to match. */
2231 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
2233 bitmap_set_bit (need_eh_cleanup, bb->index);
2234 if (dump_file && (dump_flags & TDF_DETAILS))
2235 fprintf (dump_file, " Flagged to clear EH edges.\n");
2239 /* Queue the statement to be re-scanned after all the
2240 AVAIL_EXPRS have been processed. The change buffer stack for
2241 all the pushed statements will be processed when this queue
2242 is emptied. */
2243 if (may_have_exposed_new_symbols)
2244 VEC_safe_push (gimple, heap, stmts_to_rescan, gsi_stmt (si));
2247 /* Search for an existing instance of STMT in the AVAIL_EXPRS table.
2248 If found, return its LHS. Otherwise insert STMT in the table and
2249 return NULL_TREE.
2251 Also, when an expression is first inserted in the table, it is
2252 also added to AVAIL_EXPRS_STACK, so that it can be removed when
2253 we finish processing this block and its children. */
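/* A hypothetical sequence: looking up "a_1 = b_2 + c_3" with INSERT
   true enters "b_2 + c_3" into the table and returns NULL_TREE; a
   later lookup of "d_4 = b_2 + c_3" finds that entry and returns
   a_1, or a_1's current value from the const_and_copies table.  */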
2255 static tree
2256 lookup_avail_expr (gimple stmt, bool insert)
2258 void **slot;
2259 tree lhs;
2260 tree temp;
2261 struct expr_hash_elt *element = XNEW (struct expr_hash_elt);
2263 /* Get LHS of assignment or call, else NULL_TREE. */
2264 lhs = gimple_get_lhs (stmt);
2266 initialize_hash_element (stmt, lhs, element);
2268 if (dump_file && (dump_flags & TDF_DETAILS))
2270 fprintf (dump_file, "LKUP ");
2271 print_expr_hash_elt (dump_file, element);
2274 /* Don't bother remembering constant assignments and copy operations.
2275 Constants and copy operations are handled by the constant/copy propagator
2276 in optimize_stmt. */
2277 if (element->expr.kind == EXPR_SINGLE
2278 && (TREE_CODE (element->expr.ops.single.rhs) == SSA_NAME
2279 || is_gimple_min_invariant (element->expr.ops.single.rhs)))
2281 free (element);
2282 return NULL_TREE;
2285 /* Finally try to find the expression in the main expression hash table. */
2286 slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
2287 (insert ? INSERT : NO_INSERT));
2288 if (slot == NULL)
2290 free (element);
2291 return NULL_TREE;
2294 if (*slot == NULL)
2296 *slot = (void *) element;
2298 if (dump_file && (dump_flags & TDF_DETAILS))
2300 fprintf (dump_file, "2>>> ");
2301 print_expr_hash_elt (dump_file, element);
2304 VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
2305 return NULL_TREE;
2308 /* Extract the LHS of the assignment so that it can be used as the current
2309 definition of another variable. */
2310 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2312 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2313 use the value from the const_and_copies table. */
2314 if (TREE_CODE (lhs) == SSA_NAME)
2316 temp = SSA_NAME_VALUE (lhs);
2317 if (temp)
2318 lhs = temp;
2321 free (element);
2323 if (dump_file && (dump_flags & TDF_DETAILS))
2325 fprintf (dump_file, "FIND: ");
2326 print_generic_expr (dump_file, lhs, 0);
2327 fprintf (dump_file, "\n");
2330 return lhs;
2333 /* Hashing and equality functions for AVAIL_EXPRS. We compute a value number
2334 for expressions using the code of the expression and the SSA numbers of
2335 its operands. */
2337 static hashval_t
2338 avail_expr_hash (const void *p)
2340 gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
2341 const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
2342 tree vuse;
2343 hashval_t val = 0;
2345 val = iterative_hash_hashable_expr (expr, val);
2347 /* If the hash table entry is not associated with a statement, then we
2348 can just hash the expression and not worry about virtual operands
2349 and such. */
2350 if (!stmt)
2351 return val;
2353 /* Add the SSA version number of the vuse operand. This is important
2354 because compound variables like arrays are not renamed in the
2355 operands. Rather, the rename is done on the virtual variable
2356 representing all the elements of the array. */
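/* For example (names hypothetical), in

     x_1 = a[i_2];
     a[j_3] = t_4;
     x_5 = a[i_2];

   the two loads see different virtual SSA names because of the
   intervening store, so they hash differently and are correctly
   not treated as redundant.  */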
2357 if ((vuse = gimple_vuse (stmt)))
2358 val = iterative_hash_expr (vuse, val);
2360 return val;
2363 static hashval_t
2364 real_avail_expr_hash (const void *p)
2366 return ((const struct expr_hash_elt *)p)->hash;
2369 static int
2370 avail_expr_eq (const void *p1, const void *p2)
2372 gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
2373 const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
2374 const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
2375 gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
2376 const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
2377 const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;
2379 /* This case should apply only when removing entries from the table. */
2380 if (stamp1 == stamp2)
2381 return true;
2383 /* FIXME tuples:
2384 We add stmts to a hash table and then modify them. To detect the case
2385 that we modify a stmt and then search for it, we assume that the hash
2386 is always modified by that change.
2387 We have to fully check why this doesn't happen on trunk or rewrite
2388 this in a more reliable (and easier to understand) way. */
2389 if (((const struct expr_hash_elt *)p1)->hash
2390 != ((const struct expr_hash_elt *)p2)->hash)
2391 return false;
2393 /* In case of a collision, both RHSs have to be identical and have the
2394 same VUSE operands. */
2395 if (hashable_expr_equal_p (expr1, expr2)
2396 && types_compatible_p (expr1->type, expr2->type))
2398 /* Note that STMT1 and/or STMT2 may be NULL. */
2399 return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
2400 == (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
2403 return false;
2406 /* PHI-ONLY copy and constant propagation. This pass is meant to clean
2407 up degenerate PHIs created by or exposed by jump threading. */
2409 /* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
2410 NULL. */
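/* For example (illustrative), x_3 = PHI <x_1, x_1, x_3> is a
   degenerate with value x_1, since arguments equal to the result
   are ignored, whereas x_3 = PHI <x_1, x_2> is not.  */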
2412 tree
2413 degenerate_phi_result (gimple phi)
2415 tree lhs = gimple_phi_result (phi);
2416 tree val = NULL;
2417 size_t i;
2419 /* Ignoring arguments which are the same as LHS, if all the remaining
2420 arguments are the same, then the PHI is a degenerate and has the
2421 value of that common argument. */
2422 for (i = 0; i < gimple_phi_num_args (phi); i++)
2424 tree arg = gimple_phi_arg_def (phi, i);
2426 if (arg == lhs)
2427 continue;
2428 else if (!val)
2429 val = arg;
2430 else if (!operand_equal_p (arg, val, 0))
2431 break;
2433 return (i == gimple_phi_num_args (phi) ? val : NULL);
2436 /* Given a statement STMT, which is either a PHI node or an assignment,
2437 remove it from the IL. */
2439 static void
2440 remove_stmt_or_phi (gimple stmt)
2442 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2444 if (gimple_code (stmt) == GIMPLE_PHI)
2445 remove_phi_node (&gsi, true);
2446 else
2448 gsi_remove (&gsi, true);
2449 release_defs (stmt);
2453 /* Given a statement STMT, which is either a PHI node or an assignment,
2454 return the "rhs" of the node. In the case of a non-degenerate
2455 PHI, NULL is returned. */
2457 static tree
2458 get_rhs_or_phi_arg (gimple stmt)
2460 if (gimple_code (stmt) == GIMPLE_PHI)
2461 return degenerate_phi_result (stmt);
2462 else if (gimple_assign_single_p (stmt))
2463 return gimple_assign_rhs1 (stmt);
2464 else
2465 gcc_unreachable ();
2469 /* Given a statement STMT, which is either a PHI node or an assignment,
2470 return the "lhs" of the node. */
2472 static tree
2473 get_lhs_or_phi_result (gimple stmt)
2475 if (gimple_code (stmt) == GIMPLE_PHI)
2476 return gimple_phi_result (stmt);
2477 else if (is_gimple_assign (stmt))
2478 return gimple_assign_lhs (stmt);
2479 else
2480 gcc_unreachable ();
2483 /* Propagate RHS into all uses of LHS (when possible).
2485 RHS and LHS are derived from STMT, which is passed in solely so
2486 that we can remove it if propagation is successful.
2488 When propagating into a PHI node or into a statement which turns
2489 into a trivial copy or constant initialization, set the
2490 appropriate bit in INTERESTING_NAMEs so that we will visit those
2491 nodes as well in an effort to pick up secondary optimization
2492 opportunities. */
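/* A sketch of the cascade (names hypothetical): propagating
   "x_1 = 0" into "if (x_1 == 0)" allows the condition to fold to a
   constant; the untaken outgoing edge is then removed and the PHI
   nodes at its destination are marked in INTERESTING_NAMES, since
   they may have become degenerate.  */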
2494 static void
2495 propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
2497 /* First verify that propagation is valid and isn't going to move a
2498 loop variant variable outside its loop. */
2499 if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
2500 && (TREE_CODE (rhs) != SSA_NAME
2501 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2502 && may_propagate_copy (lhs, rhs)
2503 && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
2505 use_operand_p use_p;
2506 imm_use_iterator iter;
2507 gimple use_stmt;
2508 bool all = true;
2510 /* Dump details. */
2511 if (dump_file && (dump_flags & TDF_DETAILS))
2513 fprintf (dump_file, " Replacing '");
2514 print_generic_expr (dump_file, lhs, dump_flags);
2515 fprintf (dump_file, "' with %s '",
2516 (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
2517 print_generic_expr (dump_file, rhs, dump_flags);
2518 fprintf (dump_file, "'\n");
2521 /* Walk over every use of LHS and try to replace the use with RHS.
2522 At this point the only reason why such a propagation would not
2523 be successful would be if the use occurs in an ASM_EXPR. */
2524 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2527 /* It's not always safe to propagate into an ASM_EXPR. */
2528 if (gimple_code (use_stmt) == GIMPLE_ASM
2529 && ! may_propagate_copy_into_asm (lhs))
2531 all = false;
2532 continue;
2535 /* Dump details. */
2536 if (dump_file && (dump_flags & TDF_DETAILS))
2538 fprintf (dump_file, " Original statement:");
2539 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2542 /* Propagate the RHS into this use of the LHS. */
2543 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2544 propagate_value (use_p, rhs);
2546 /* Special cases to avoid useless calls into the folding
2547 routines, operand scanning, etc.
2549 First, propagation into a PHI may cause the PHI to become
2550 a degenerate, so mark the PHI as interesting. No other
2551 actions are necessary.
2553 Second, if we're propagating a virtual operand and the
2554 propagation does not change the underlying _DECL node for
2555 the virtual operand, then no further actions are necessary. */
2556 if (gimple_code (use_stmt) == GIMPLE_PHI
2557 || (! is_gimple_reg (lhs)
2558 && TREE_CODE (rhs) == SSA_NAME
2559 && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
2561 /* Dump details. */
2562 if (dump_file && (dump_flags & TDF_DETAILS))
2564 fprintf (dump_file, " Updated statement:");
2565 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2568 /* Propagation into a PHI may expose new degenerate PHIs,
2569 so mark the result of the PHI as interesting. */
2570 if (gimple_code (use_stmt) == GIMPLE_PHI)
2572 tree result = get_lhs_or_phi_result (use_stmt);
2573 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2576 continue;
2579 /* From this point onward we are propagating into a
2580 real statement. Folding may (or may not) be possible,
2581 we may expose new operands, expose dead EH edges,
2582 etc. */
2583 /* NOTE tuples. In the tuples world, fold_stmt_inplace
2584 cannot fold a call that simplifies to a constant,
2585 because the GIMPLE_CALL must be replaced by a
2586 GIMPLE_ASSIGN, and there is no way to effect such a
2587 transformation in-place. We might want to consider
2588 using the more general fold_stmt here. */
2589 fold_stmt_inplace (use_stmt);
2591 /* Sometimes propagation can expose new operands to the
2592 renamer. */
2593 update_stmt (use_stmt);
2595 /* Dump details. */
2596 if (dump_file && (dump_flags & TDF_DETAILS))
2598 fprintf (dump_file, " Updated statement:");
2599 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2602 /* If we replaced a variable index with a constant, then
2603 we would need to update the invariant flag for ADDR_EXPRs. */
2604 if (gimple_assign_single_p (use_stmt)
2605 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
2606 recompute_tree_invariant_for_addr_expr
2607 (gimple_assign_rhs1 (use_stmt));
2609 /* If we cleaned up EH information from the statement,
2610 mark its containing block as needing EH cleanups. */
2611 if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
2613 bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
2614 if (dump_file && (dump_flags & TDF_DETAILS))
2615 fprintf (dump_file, " Flagged to clear EH edges.\n");
2618 /* Propagation may expose new trivial copy/constant propagation
2619 opportunities. */
2620 if (gimple_assign_single_p (use_stmt)
2621 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
2622 && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
2623 || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
2625 tree result = get_lhs_or_phi_result (use_stmt);
2626 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2629 /* Propagation into these nodes may make certain edges in
2630 the CFG unexecutable. We want to identify them, as PHI nodes
2631 at the destinations of those unexecutable edges may become
2632 degenerate. */
2633 else if (gimple_code (use_stmt) == GIMPLE_COND
2634 || gimple_code (use_stmt) == GIMPLE_SWITCH
2635 || gimple_code (use_stmt) == GIMPLE_GOTO)
2637 tree val;
2639 if (gimple_code (use_stmt) == GIMPLE_COND)
2640 val = fold_binary (gimple_cond_code (use_stmt),
2641 boolean_type_node,
2642 gimple_cond_lhs (use_stmt),
2643 gimple_cond_rhs (use_stmt));
2644 else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
2645 val = gimple_switch_index (use_stmt);
2646 else
2647 val = gimple_goto_dest (use_stmt);
2649 if (val && is_gimple_min_invariant (val))
2651 basic_block bb = gimple_bb (use_stmt);
2652 edge te = find_taken_edge (bb, val);
2653 edge_iterator ei;
2654 edge e;
2655 gimple_stmt_iterator gsi, psi;
2657 /* Remove all outgoing edges except TE. */
2658 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
2660 if (e != te)
2662 /* Mark all the PHI nodes at the destination of
2663 the unexecutable edge as interesting. */
2664 for (psi = gsi_start_phis (e->dest);
2665 !gsi_end_p (psi);
2666 gsi_next (&psi))
2668 gimple phi = gsi_stmt (psi);
2670 tree result = gimple_phi_result (phi);
2671 int version = SSA_NAME_VERSION (result);
2673 bitmap_set_bit (interesting_names, version);
2676 te->probability += e->probability;
2678 te->count += e->count;
2679 remove_edge (e);
2680 cfg_altered = true;
2682 else
2683 ei_next (&ei);
2686 gsi = gsi_last_bb (gimple_bb (use_stmt));
2687 gsi_remove (&gsi, true);
2689 /* And fixup the flags on the single remaining edge. */
2690 te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
2691 te->flags &= ~EDGE_ABNORMAL;
2692 te->flags |= EDGE_FALLTHRU;
2693 if (te->probability > REG_BR_PROB_BASE)
2694 te->probability = REG_BR_PROB_BASE;
2699 /* Ensure there is nothing else to do. */
2700 gcc_assert (!all || has_zero_uses (lhs));
2702 /* If we were able to propagate away all uses of LHS, then
2703 we can remove STMT. */
2704 if (all)
2705 remove_stmt_or_phi (stmt);
2709 /* STMT is either a PHI node (potentially a degenerate PHI node) or
2710 a statement that is a trivial copy or constant initialization.
2712 Attempt to eliminate STMT by propagating its RHS into all uses of
2713 its LHS. This may in turn set new bits in INTERESTING_NAMES
2714 for nodes we want to revisit later.
2716 All exit paths should clear INTERESTING_NAMES for the result
2717 of STMT. */
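/* For instance (hypothetical), given "x_1 = PHI <y_2, y_2>", the
   LHS is x_1 and the degenerate RHS is y_2; uses of x_1 are
   rewritten to y_2, the PHI is removed, and x_1's bit in
   INTERESTING_NAMES is cleared.  */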
2719 static void
2720 eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
2722 tree lhs = get_lhs_or_phi_result (stmt);
2723 tree rhs;
2724 int version = SSA_NAME_VERSION (lhs);
2726 /* If the LHS of this statement or PHI has no uses, then we can
2727 just eliminate it. This can occur if, for example, the PHI
2728 was created by block duplication due to threading and its only
2729 use was in the conditional at the end of the block which was
2730 deleted. */
2731 if (has_zero_uses (lhs))
2733 bitmap_clear_bit (interesting_names, version);
2734 remove_stmt_or_phi (stmt);
2735 return;
2738 /* Get the RHS of the assignment or PHI node if the PHI is a
2739 degenerate. */
2740 rhs = get_rhs_or_phi_arg (stmt);
2741 if (!rhs)
2743 bitmap_clear_bit (interesting_names, version);
2744 return;
2747 propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
2749 /* Note that STMT may well have been deleted by now, so do
2750 not access it; instead use the saved version number to clear
2751 STMT's entry in the worklist. */
2752 bitmap_clear_bit (interesting_names, version);
2755 /* The first phase in degenerate PHI elimination.
2757 Eliminate the degenerate PHIs in BB, then recurse on the
2758 dominator children of BB. */
2760 static void
2761 eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
2763 gimple_stmt_iterator gsi;
2764 basic_block son;
2766 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2768 gimple phi = gsi_stmt (gsi);
2770 eliminate_const_or_copy (phi, interesting_names);
2773 /* Recurse into the dominator children of BB. */
2774 for (son = first_dom_son (CDI_DOMINATORS, bb);
2775 son;
2776 son = next_dom_son (CDI_DOMINATORS, son))
2777 eliminate_degenerate_phis_1 (son, interesting_names);
2781 /* A very simple pass to eliminate degenerate PHI nodes from the
2782 IL. This is meant to be fast enough to be able to be run several
2783 times in the optimization pipeline.
2785 Certain optimizations, particularly those which duplicate blocks
2786 or remove edges from the CFG, can create or expose PHIs which are
2787 trivial copies or constant initializations.
2789 While we could pick up these optimizations in DOM or with the
2790 combination of copy-prop and CCP, those solutions are far too
2791 heavy-weight for our needs.
2793 This implementation has two phases so that we can efficiently
2794 eliminate the first order degenerate PHIs and second order
2795 degenerate PHIs.
2797 The first phase performs a dominator walk to identify and eliminate
2798 the vast majority of the degenerate PHIs. When a degenerate PHI
2799 is identified and eliminated, any affected statements or PHIs
2800 are put on a worklist.
2802 The second phase eliminates degenerate PHIs and trivial copies
2803 or constant initializations using the worklist. This is how we
2804 pick up the secondary optimization opportunities with minimal
2805 cost. */
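/* A second-order example (hypothetical): eliminating
   x_2 = PHI <x_1, x_1> rewrites y_4 = PHI <x_2, x_1> into
   y_4 = PHI <x_1, x_1>, which the worklist phase then picks up
   and eliminates in turn.  */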
2807 static unsigned int
2808 eliminate_degenerate_phis (void)
2810 bitmap interesting_names;
2811 bitmap interesting_names1;
2813 /* Bitmap of blocks which need their EH information updated. We cannot
2814 update it on the fly, as doing so invalidates the dominator tree. */
2815 need_eh_cleanup = BITMAP_ALLOC (NULL);
2817 /* INTERESTING_NAMES is effectively our worklist, indexed by
2818 SSA_NAME_VERSION.
2820 A set bit indicates that the statement or PHI node which
2821 defines the SSA_NAME should be (re)examined to determine if
2822 it has become a degenerate PHI or trivial const/copy propagation
2823 opportunity.
2825 Experiments have shown that we generally get better compilation
2826 time behavior with bitmaps rather than sbitmaps. */
2827 interesting_names = BITMAP_ALLOC (NULL);
2828 interesting_names1 = BITMAP_ALLOC (NULL);
2830 calculate_dominance_info (CDI_DOMINATORS);
2831 cfg_altered = false;
2833 /* First phase. Eliminate degenerate PHIs via a dominator
2834 walk of the CFG.
2836 Experiments have indicated that we generally get better
2837 compile-time behavior by visiting blocks in the first
2838 phase in dominator order. Presumably this is because walking
2839 in dominator order leaves fewer PHIs for later examination
2840 by the worklist phase. */
2841 eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
2843 /* Second phase. Eliminate second order degenerate PHIs as well
2844 as trivial copies or constant initializations identified by
2845 the first phase or this phase. Basically we keep iterating
2846 until our set of INTERESTING_NAMEs is empty. */
2847 while (!bitmap_empty_p (interesting_names))
2849 unsigned int i;
2850 bitmap_iterator bi;
2852 /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
2853 changed during the loop. Copy it to another bitmap and
2854 use that. */
2855 bitmap_copy (interesting_names1, interesting_names);
2857 EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
2859 tree name = ssa_name (i);
2861 /* Ignore SSA_NAMEs that have been released because
2862 their defining statement was deleted (unreachable). */
2863 if (name)
2864 eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
2865 interesting_names);
2869 if (cfg_altered)
2870 free_dominance_info (CDI_DOMINATORS);
2872 /* Propagation of const and copies may make some EH edges dead. Purge
2873 such edges from the CFG as needed. */
2874 if (!bitmap_empty_p (need_eh_cleanup))
2876 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
2877 BITMAP_FREE (need_eh_cleanup);
2880 BITMAP_FREE (interesting_names);
2881 BITMAP_FREE (interesting_names1);
2882 return 0;
2885 struct gimple_opt_pass pass_phi_only_cprop =
2888 GIMPLE_PASS,
2889 "phicprop", /* name */
2890 gate_dominator, /* gate */
2891 eliminate_degenerate_phis, /* execute */
2892 NULL, /* sub */
2893 NULL, /* next */
2894 0, /* static_pass_number */
2895 TV_TREE_PHI_CPROP, /* tv_id */
2896 PROP_cfg | PROP_ssa, /* properties_required */
2897 0, /* properties_provided */
2898 0, /* properties_destroyed */
2899 0, /* todo_flags_start */
2900 TODO_cleanup_cfg
2901 | TODO_dump_func
2902 | TODO_ggc_collect
2903 | TODO_verify_ssa
2904 | TODO_verify_stmts
2905 | TODO_update_ssa /* todo_flags_finish */