/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"

/* This file implements optimizations on the dominator tree.  */
/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */

enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_CALL
};

struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0; tree opnd1; } binary;
    struct { tree fn; bool pure; size_t nargs; tree *args; } call;
  } ops;
};
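
/* Illustrative example (editorial note, not used by the pass): the
   GIMPLE assignment a_1 = b_2 + c_3 is represented with
   kind == EXPR_BINARY, ops.binary.op == PLUS_EXPR,
   ops.binary.opnd0 == b_2 and ops.binary.opnd1 == c_3, while the
   plain copy a_1 = b_2 uses kind == EXPR_SINGLE with
   ops.single.rhs == b_2.  See initialize_hash_element below for the
   exact mapping.  */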
/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

struct cond_equivalence
{
  struct hashable_expr cond;
  tree value;
};
/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  The number of recorded conditions can vary, but
     can be determined by the condition's code.  So we have an array
     and its maximum index rather than using a varray.  */
  struct cond_equivalence *cond_equivalences;
  unsigned int max_cond_equivalences;
};
/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly, as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;
/* Stack of available expressions in AVAIL_EXPRS.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;
DEF_VEC_P(expr_hash_elt_t);
DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);

static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;
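
/* Sketch of the unwinding discipline (illustrative only; the real code
   is in dom_opt_enter_block and remove_local_expressions_from_table):

     VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
       ... record expressions while processing the block ...
     while ((victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack)))
       ... remove VICTIM from AVAIL_EXPRS ...

   Popping back to the NULL marker restores AVAIL_EXPRS to the state
   it had in the dominator parent of the block being finalized.  */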
/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};
/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;
/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;
/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (struct cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_leave_block (struct dom_walk_data *, basic_block);
static void dom_opt_enter_block (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      expr->type = NULL_TREE;

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn = gimple_call_fn (stmt);

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = (tree *) xcalloc (nargs, sizeof (tree));
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
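
/* For example (illustrative): for the statement x_3 = y_1 + z_2 the
   element records an EXPR_BINARY with PLUS_EXPR over y_1 and z_2;
   for the control statement if (x_3 != 0) it records the
   boolean-typed binary expression x_3 != 0, with whatever LHS the
   caller supplied.  */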
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}
/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Compare two hashable_expr structures for equivalence.
   They are considered equivalent when the expressions
   they denote must necessarily be equal.  The logic is intended
   to follow that of operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (! operand_equal_p (expr0->ops.call.fn,
                               expr1->ops.call.fn, 0))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
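
/* For example (illustrative): a_1 + b_2 and b_2 + a_1 compare equal
   because PLUS_EXPR is commutative, whereas a_1 - b_2 and b_2 - a_1
   do not, because MINUS_EXPR is not.  */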
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */

static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;

        val = iterative_hash_object (code, val);
        val = iterative_hash_expr (expr->ops.call.fn, val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}
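
/* Note (editorial): the commutative case must use an order-insensitive
   hash.  If a_1 + b_2 and b_2 + a_1 hashed differently, expressions
   that compare equal under hashable_expr_equal_p above could land in
   different hash buckets and the redundancy would be missed.  */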
/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;

        print_generic_expr (stream, element->expr.ops.call.fn, 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, " ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}
/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);

  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);

  free (element);
}
/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}
/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              if (edge_info->cond_equivalences)
                free (edge_info->cond_equivalences);
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}
/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free_expr_hash_elt);
  avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dom_opt_enter_block;
  walk_data.after_dom_children = dom_opt_leave_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;

    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (single_succ_p (bb)
              && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            {
              bitmap_clear_bit (need_eh_cleanup, i);
              bitmap_set_bit (need_eh_cleanup, single_succ (bb)->index);
            }
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);

  /* Free the value-handle array.  */
  threadedge_finalize_values ();
  ssa_name_values = NULL;

  return 0;
}
/* Gate for the dominator optimization pass.  */

static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}
struct gimple_opt_pass pass_dominator =
{
 {
  GIMPLE_PASS,
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_cleanup_cfg
    | TODO_verify_ssa                   /* todo_flags_finish */
 }
};
/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */

static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}
/* Remove all the expressions made available in the current block from
   the AVAIL_EXPRS hash table, unwinding AVAIL_EXPRS_STACK until we hit
   the NULL marker that was pushed when we entered the block.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
    {
      expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);
      void **slot;

      if (victim == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, victim);
        }

      slot = htab_find_slot_with_hash (avail_exprs,
                                       victim, victim->hash, NO_INSERT);
      gcc_assert (slot && *slot == (void *) victim);
      htab_clear_slot (avail_exprs, slot);
    }
}
/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = VEC_pop (tree, const_and_copies_stack);
      set_ssa_name_value (dest, prev_value);
    }
}
/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}
/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

static void
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  if (! walk_data->global_data)
    {
      gimple dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);
      walk_data->global_data = dummy_cond;
    }

  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
}
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */

static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}
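
/* For example (illustrative): x_4 = PHI <y_2, y_2, x_4> records the
   equivalence x_4 = y_2, since the alternative equal to the result is
   ignored, while x_4 = PHI <y_2, z_3> records nothing.  */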
/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}
/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;

          if (lhs)
            record_equality (lhs, rhs);

          if (cond_equivalences)
            for (i = 0; i < edge_info->max_cond_equivalences; i++)
              record_cond (&cond_equivalences[i]);
        }
    }
}
/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements: %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, "    avail_exprs: ");
  htab_statistics (file, avail_exprs);
}


/* Dump SSA statistics on stderr.  */

void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}


/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}
/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (struct cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
    }
  else
    free (element);
}
/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           struct cond_equivalence *p)
{
  struct hashable_expr *cond = &p->cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  p->value = boolean_true_node;
}
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 6;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 6);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[5]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 3;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 3);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[2]);
        }
      else
        {
          edge_info->max_cond_equivalences = 2;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 5;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 5);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNORDERED_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 8);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[5]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[7]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNEQ_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case LTGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    default:
      edge_info->max_cond_equivalences = 2;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &edge_info->cond_equivalences[0].cond);
  edge_info->cond_equivalences[0].value = boolean_true_node;

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &edge_info->cond_equivalences[1].cond);
  edge_info->cond_equivalences[1].value = boolean_false_node;
}
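
/* For example (illustrative): for integral operands, traversing the
   true edge of if (a_1 < b_2) records four equivalences:
   (a_1 < b_2) = true, (a_1 >= b_2) = false, (a_1 <= b_2) = true and
   (a_1 != b_2) = true.  */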
/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  set_ssa_name_value (x, y);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}
/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

static int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}
/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
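
/* For example (illustrative): learning j_1 == 7 on an edge records
   the invariant 7 as the value of j_1, so dominated uses of j_1 can
   become 7; learning i_2 == j_1 records a copy, canonicalized on one
   of the two names.  */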
/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

static bool
simple_iv_increment_p (gimple stmt)
{
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  if (gimple_assign_rhs_code (stmt) != PLUS_EXPR
      && gimple_assign_rhs_code (stmt) != MINUS_EXPR)
    return false;

  preinc = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */

static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }
    }
}
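
/* For example (illustrative): if SSA_NAME_VALUE (a_3) is the constant
   7, then a PHI argument a_3 flowing into a successor along a normal
   edge is rewritten to 7.  */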
/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert_loc (loc, TREE_TYPE (index),
                                                 CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
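          /* E.g. (illustrative): given if (x_1 == 0) with boolean x_1,
             the true edge records x_1 = false and the false edge
             records x_1 = true, rather than recording the comparison
             itself.  */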
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (code == NE_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }
          else if (TREE_CODE (op0) == SSA_NAME
                   && (is_gimple_min_invariant (op1)
                       || TREE_CODE (op1) == SSA_NAME))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (TREE_CODE (cond) == NE_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }
        }

      /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
    }
}
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

static void
dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                     basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = (struct edge_info *) true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          dom_thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
          restore_vars_to_original_value ();
        }

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
          edge_info = (struct edge_info *) false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          /* Now thread the edge.  */
          dom_thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();
}
/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.  */

static void
eliminate_redundant_computations (gimple_stmt_iterator* gsi)
{
  tree expr_type;
  tree cached_lhs;
  bool insert = true;
  bool assigns_var_p = false;

  gimple stmt = gsi_stmt (*gsi);

  tree def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but can not
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || gimple_vdef (stmt)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    expr_type = TREE_TYPE (gimple_switch_index (stmt));
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
    {
#if defined ENABLE_CHECKING
      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
                  || is_gimple_min_invariant (cached_lhs));
#endif

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced redundant expr '");
          print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

      if (assigns_var_p
          && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
    }
}
1862 /* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
1863 the available expressions table or the const_and_copies table.
1864 Detect and record those equivalences. */
1865 /* We handle only very simple copy equivalences here. The heavy
1866 lifing is done by eliminate_redundant_computations. */
1868 static void
1869 record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
1871 tree lhs;
1872 enum tree_code lhs_code;
1874 gcc_assert (is_gimple_assign (stmt));
1876 lhs = gimple_assign_lhs (stmt);
1877 lhs_code = TREE_CODE (lhs);
1879 if (lhs_code == SSA_NAME
1880 && gimple_assign_single_p (stmt))
1882 tree rhs = gimple_assign_rhs1 (stmt);
1884 /* If the RHS of the assignment is a constant or another variable that
1885 may be propagated, register it in the CONST_AND_COPIES table. We
1886 do not need to record unwind data for this, since this is a true
1887 assignment and not an equivalence inferred from a comparison. All
1888 uses of this ssa name are dominated by this assignment, so unwinding
1889 just costs time and space. */
1890 if (may_optimize_p
1891 && (TREE_CODE (rhs) == SSA_NAME
1892 || is_gimple_min_invariant (rhs)))
1894 if (dump_file && (dump_flags & TDF_DETAILS))
1896 fprintf (dump_file, "==== ASGN ");
1897 print_generic_expr (dump_file, lhs, 0);
1898 fprintf (dump_file, " = ");
1899 print_generic_expr (dump_file, rhs, 0);
1900 fprintf (dump_file, "\n");
1903 set_ssa_name_value (lhs, rhs);
1907 /* A memory store, even an aliased store, creates a useful
1908 equivalence. By exchanging the LHS and RHS, creating suitable
1909 vops and recording the result in the available expression table,
1910 we may be able to expose more redundant loads. */
1911 if (!gimple_has_volatile_ops (stmt)
1912 && gimple_references_memory_p (stmt)
1913 && gimple_assign_single_p (stmt)
1914 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1915 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
1916 && !is_gimple_reg (lhs))
1918 tree rhs = gimple_assign_rhs1 (stmt);
1919 gimple new_stmt;
1921 /* Build a new statement with the RHS and LHS exchanged. */
1922 if (TREE_CODE (rhs) == SSA_NAME)
1924 /* NOTE tuples. The call to gimple_build_assign below replaced
1925 a call to build_gimple_modify_stmt, which did not set the
1926 SSA_NAME_DEF_STMT on the LHS of the assignment. Doing so
1927 may cause an SSA validation failure, as the LHS may be a
1928 default-initialized name and should have no definition. I'm
1929 a bit dubious of this, as the artificial statement that we
1930 generate here may in fact be ill-formed, but it is simply
1931 used as an internal device in this pass, and never becomes
1932 part of the CFG. */
1933 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
1934 new_stmt = gimple_build_assign (rhs, lhs);
1935 SSA_NAME_DEF_STMT (rhs) = defstmt;
1937 else
1938 new_stmt = gimple_build_assign (rhs, lhs);
1940 gimple_set_vuse (new_stmt, gimple_vdef (stmt));
1942 /* Finally enter the statement into the available expression
1943 table. */
1944 lookup_avail_expr (new_stmt, true);
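/* Illustrative, self-contained sketch (not part of this file): the
   store-exchange trick above, in miniature.  After a store *p = x we
   remember "*p" as an available expression whose value is x, so a
   later load y = *p can simply reuse x.  The slot table below is a
   hypothetical stand-in; it ignores aliasing and VUSE tracking.  */

#include <stdio.h>

#define MAX_SLOTS 16
static int avail_val[MAX_SLOTS];  /* value known to be stored in slot */
static int avail_set[MAX_SLOTS];  /* nonzero if a value is recorded */

static void record_store (int slot, int val)
{
  /* The exchanged form: enter "mem == val" into the table.  */
  avail_set[slot] = 1;
  avail_val[slot] = val;
}

static int load (int slot, int value_in_memory)
{
  if (avail_set[slot])
    return avail_val[slot];       /* redundant load eliminated */
  return value_in_memory;         /* a real load would happen here */
}

int main (void)
{
  record_store (3, 42);           /* models *p_1 = x_2 with x_2 == 42 */
  printf ("%d\n", load (3, -1));  /* models y_3 = *p_1; prints 42 */
  return 0;
}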
1948 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
1949 CONST_AND_COPIES. */
1951 static void
1952 cprop_operand (gimple stmt, use_operand_p op_p)
1954 tree val;
1955 tree op = USE_FROM_PTR (op_p);
1957 /* If the operand has a known constant value or it is known to be a
1958 copy of some other variable, use the value or copy stored in
1959 CONST_AND_COPIES. */
1960 val = SSA_NAME_VALUE (op);
1961 if (val && val != op)
1963 /* Do not change the base variable in the virtual operand
1964 tables. That would make it impossible to reconstruct
1965 the renamed virtual operand if we later modify this
1966 statement. Also only allow the new value to be an SSA_NAME
1967 for propagation into virtual operands. */
1968 if (!is_gimple_reg (op)
1969 && (TREE_CODE (val) != SSA_NAME
1970 || is_gimple_reg (val)
1971 || get_virtual_var (val) != get_virtual_var (op)))
1972 return;
1974 /* Do not replace hard register operands in asm statements. */
1975 if (gimple_code (stmt) == GIMPLE_ASM
1976 && !may_propagate_copy_into_asm (op))
1977 return;
1979 /* Certain operands are not allowed to be copy propagated due
1980 to their interaction with exception handling and some GCC
1981 extensions. */
1982 if (!may_propagate_copy (op, val))
1983 return;
1985 /* Do not propagate addresses that point to volatiles into memory
1986 stmts without volatile operands. */
1987 if (POINTER_TYPE_P (TREE_TYPE (val))
1988 && TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (val)))
1989 && gimple_has_mem_ops (stmt)
1990 && !gimple_has_volatile_ops (stmt))
1991 return;
1993 /* Do not propagate copies if the propagated value is at a deeper loop
1994 depth than the propagatee. Otherwise, this may move loop variant
1995 variables outside of their loops and prevent coalescing
1996 opportunities. If the value was loop invariant, it will be hoisted
1997 by LICM and exposed for copy propagation. */
1998 if (loop_depth_of_name (val) > loop_depth_of_name (op))
1999 return;
2001 /* Do not propagate copies into simple IV increment statements.
2002 See PR23821 for how this can disturb IV analysis. */
2003 if (TREE_CODE (val) != INTEGER_CST
2004 && simple_iv_increment_p (stmt))
2005 return;
2007 /* Dump details. */
2008 if (dump_file && (dump_flags & TDF_DETAILS))
2010 fprintf (dump_file, " Replaced '");
2011 print_generic_expr (dump_file, op, dump_flags);
2012 fprintf (dump_file, "' with %s '",
2013 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2014 print_generic_expr (dump_file, val, dump_flags);
2015 fprintf (dump_file, "'\n");
2018 if (TREE_CODE (val) != SSA_NAME)
2019 opt_stats.num_const_prop++;
2020 else
2021 opt_stats.num_copy_prop++;
2023 propagate_value (op_p, val);
2025 /* And note that we modified this statement. This is now
2026 safe, even if we changed virtual operands since we will
2027 rescan the statement and rewrite its operands again. */
2028 gimple_set_modified (stmt, true);
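/* Illustrative, self-contained sketch (not part of this file): the
   CONST_AND_COPIES lookup performed by cprop_operand, in miniature.
   Each SSA version may map to a recorded value (a copy or constant);
   an operand is rewritten only when a value is known.  The ssa_value
   table and the integer encoding below are hypothetical, and none of
   the real validity checks above are modeled.  */

#include <stdio.h>

#define NUM_NAMES 8
static int ssa_value[NUM_NAMES];  /* 0 means "no recorded value" */

static int cprop (int op)
{
  int val = ssa_value[op];
  return (val != 0 && val != op) ? val : op;
}

int main (void)
{
  ssa_value[2] = 1;   /* models recording the copy x_2 = x_1 */
  printf ("use of x_2 becomes x_%d\n", cprop (2));  /* prints 1 */
  printf ("use of x_3 stays x_%d\n", cprop (3));    /* no value: 3 */
  return 0;
}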
2032 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2033 known value for that SSA_NAME (or NULL if no value is known).
2035 Propagate values from CONST_AND_COPIES into the uses, vuses and
2036 vdef_ops of STMT. */
2038 static void
2039 cprop_into_stmt (gimple stmt)
2041 use_operand_p op_p;
2042 ssa_op_iter iter;
2044 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2046 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2047 cprop_operand (stmt, op_p);
2051 /* Optimize the statement pointed to by iterator SI.
2053 We try to perform some simplistic global redundancy elimination and
2054 constant propagation:
2056 1- To detect global redundancy, we keep track of expressions that have
2057 been computed in this block and its dominators. If we find that the
2058 same expression is computed more than once, we eliminate repeated
2059 computations by using the target of the first one.
2061 2- Constant values and copy assignments. This is used to do very
2062 simplistic constant and copy propagation. When a constant or copy
2063 assignment is found, we map the value on the RHS of the assignment to
2064 the variable in the LHS in the CONST_AND_COPIES table. */
2066 static void
2067 optimize_stmt (basic_block bb, gimple_stmt_iterator si)
2069 gimple stmt, old_stmt;
2070 bool may_optimize_p;
2071 bool modified_p = false;
2073 old_stmt = stmt = gsi_stmt (si);
2075 if (gimple_code (stmt) == GIMPLE_COND)
2076 canonicalize_comparison (stmt);
2078 update_stmt_if_modified (stmt);
2079 opt_stats.num_stmts++;
2081 if (dump_file && (dump_flags & TDF_DETAILS))
2083 fprintf (dump_file, "Optimizing statement ");
2084 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2087 /* Const/copy propagate into USES, VUSES and the RHS of VDEFs. */
2088 cprop_into_stmt (stmt);
2090 /* If the statement has been modified with constant replacements,
2091 fold its RHS before checking for redundant computations. */
2092 if (gimple_modified_p (stmt))
2094 tree rhs = NULL;
2096 /* Try to fold the statement making sure that STMT is kept
2097 up to date. */
2098 if (fold_stmt (&si))
2100 stmt = gsi_stmt (si);
2101 gimple_set_modified (stmt, true);
2103 if (dump_file && (dump_flags & TDF_DETAILS))
2105 fprintf (dump_file, " Folded to: ");
2106 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2110 /* We only need to consider cases that can yield a gimple operand. */
2111 if (gimple_assign_single_p (stmt))
2112 rhs = gimple_assign_rhs1 (stmt);
2113 else if (gimple_code (stmt) == GIMPLE_GOTO)
2114 rhs = gimple_goto_dest (stmt);
2115 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2116 /* This should never be an ADDR_EXPR. */
2117 rhs = gimple_switch_index (stmt);
2119 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2120 recompute_tree_invariant_for_addr_expr (rhs);
2122 /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
2123 even if fold_stmt updated the stmt already and thus cleared
2124 gimple_modified_p flag on it. */
2125 modified_p = true;
2128 /* Check for redundant computations. Do this optimization only
2129 for assignments, calls with an LHS, conditionals and switches that have no volatile ops. */
2130 may_optimize_p = (!gimple_has_volatile_ops (stmt)
2131 && ((is_gimple_assign (stmt)
2132 && !gimple_rhs_has_side_effects (stmt))
2133 || (is_gimple_call (stmt)
2134 && gimple_call_lhs (stmt) != NULL_TREE
2135 && !gimple_rhs_has_side_effects (stmt))
2136 || gimple_code (stmt) == GIMPLE_COND
2137 || gimple_code (stmt) == GIMPLE_SWITCH));
2139 if (may_optimize_p)
2141 if (gimple_code (stmt) == GIMPLE_CALL)
2143 /* Resolve __builtin_constant_p. If it hasn't been
2144 folded to integer_one_node by now, it's fairly
2145 certain that the value simply isn't constant. */
2146 tree callee = gimple_call_fndecl (stmt);
2147 if (callee
2148 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2149 && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
2151 propagate_tree_value_into_stmt (&si, integer_zero_node);
2152 stmt = gsi_stmt (si);
2156 update_stmt_if_modified (stmt);
2157 eliminate_redundant_computations (&si);
2158 stmt = gsi_stmt (si);
2161 /* Record any additional equivalences created by this statement. */
2162 if (is_gimple_assign (stmt))
2163 record_equivalences_from_stmt (stmt, may_optimize_p);
2165 /* If STMT is a COND_EXPR and it was modified, then we may know
2166 where it goes. If that is the case, then mark the CFG as altered.
2168 This will cause us to later call remove_unreachable_blocks and
2169 cleanup_tree_cfg when it is safe to do so. It is not safe to
2170 clean things up here since removal of edges and such can trigger
2171 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2172 the manager.
2174 That's all fine and good, except that once SSA_NAMEs are released
2175 to the manager, we must not call create_ssa_name until all references
2176 to released SSA_NAMEs have been eliminated.
2178 All references to the deleted SSA_NAMEs cannot be eliminated until
2179 we remove unreachable blocks.
2181 We cannot remove unreachable blocks until after we have completed
2182 any queued jump threading.
2184 We cannot complete any queued jump threads until we have taken
2185 appropriate variables out of SSA form. Taking variables out of
2186 SSA form can call create_ssa_name and thus we lose.
2188 Ultimately I suspect we're going to need to change the interface
2189 into the SSA_NAME manager. */
2190 if (gimple_modified_p (stmt) || modified_p)
2192 tree val = NULL;
2194 update_stmt_if_modified (stmt);
2196 if (gimple_code (stmt) == GIMPLE_COND)
2197 val = fold_binary_loc (gimple_location (stmt),
2198 gimple_cond_code (stmt), boolean_type_node,
2199 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
2200 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2201 val = gimple_switch_index (stmt);
2203 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2204 cfg_altered = true;
2206 /* If we simplified a statement in such a way that it can be shown
2207 not to trap, update the eh information and the cfg to match. */
2208 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
2210 bitmap_set_bit (need_eh_cleanup, bb->index);
2211 if (dump_file && (dump_flags & TDF_DETAILS))
2212 fprintf (dump_file, " Flagged to clear EH edges.\n");
2217 /* Search for an existing instance of STMT in the AVAIL_EXPRS table.
2218 If found, return its LHS. Otherwise insert STMT in the table and
2219 return NULL_TREE.
2221 Also, when an expression is first inserted in the table, it is
2222 added to AVAIL_EXPRS_STACK, so that it can be removed when
2223 we finish processing this block and its children. */
2225 static tree
2226 lookup_avail_expr (gimple stmt, bool insert)
2228 void **slot;
2229 tree lhs;
2230 tree temp;
2231 struct expr_hash_elt element;
2233 /* Get LHS of assignment or call, else NULL_TREE. */
2234 lhs = gimple_get_lhs (stmt);
2236 initialize_hash_element (stmt, lhs, &element);
2238 if (dump_file && (dump_flags & TDF_DETAILS))
2240 fprintf (dump_file, "LKUP ");
2241 print_expr_hash_elt (dump_file, &element);
2244 /* Don't bother remembering constant assignments and copy operations.
2245 Constants and copy operations are handled by the constant/copy propagator
2246 in optimize_stmt. */
2247 if (element.expr.kind == EXPR_SINGLE
2248 && (TREE_CODE (element.expr.ops.single.rhs) == SSA_NAME
2249 || is_gimple_min_invariant (element.expr.ops.single.rhs)))
2250 return NULL_TREE;
2252 /* Finally try to find the expression in the main expression hash table. */
2253 slot = htab_find_slot_with_hash (avail_exprs, &element, element.hash,
2254 (insert ? INSERT : NO_INSERT));
2255 if (slot == NULL)
2256 return NULL_TREE;
2258 if (*slot == NULL)
2260 struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
2261 *element2 = element;
2262 element2->stamp = element2;
2263 *slot = (void *) element2;
2265 if (dump_file && (dump_flags & TDF_DETAILS))
2267 fprintf (dump_file, "2>>> ");
2268 print_expr_hash_elt (dump_file, element2);
2271 VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element2);
2272 return NULL_TREE;
2275 /* Extract the LHS of the assignment so that it can be used as the current
2276 definition of another variable. */
2277 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2279 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2280 use the value from the const_and_copies table. */
2281 if (TREE_CODE (lhs) == SSA_NAME)
2283 temp = SSA_NAME_VALUE (lhs);
2284 if (temp)
2285 lhs = temp;
2288 if (dump_file && (dump_flags & TDF_DETAILS))
2290 fprintf (dump_file, "FIND: ");
2291 print_generic_expr (dump_file, lhs, 0);
2292 fprintf (dump_file, "\n");
2295 return lhs;
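/* Illustrative, self-contained sketch (not part of this file): the
   lookup/insert protocol of lookup_avail_expr, in miniature.  Hash
   the right-hand side; on a miss, record the defining LHS and return
   "not found"; on a hit, return the earlier LHS so the caller can
   reuse it.  The open-addressed table and the (op, a, b) expression
   encoding are hypothetical simplifications.  */

#include <stdio.h>

struct toy_expr { int op, a, b; };
struct toy_entry { struct toy_expr e; int lhs; int used; };

#define TAB_SIZE 64
static struct toy_entry tab[TAB_SIZE];

static unsigned toy_hash (const struct toy_expr *e)
{
  return (unsigned) (e->op * 31u + e->a * 7u + e->b);
}

/* Return the LHS of an earlier identical expression, or -1 after
   recording (E, LHS) as now being available.  */
static int toy_lookup_avail (const struct toy_expr *e, int lhs)
{
  unsigned i = toy_hash (e) % TAB_SIZE;
  while (tab[i].used)
    {
      if (tab[i].e.op == e->op && tab[i].e.a == e->a && tab[i].e.b == e->b)
        return tab[i].lhs;          /* redundant computation found */
      i = (i + 1) % TAB_SIZE;       /* linear probing on collision */
    }
  tab[i].e = *e;
  tab[i].lhs = lhs;
  tab[i].used = 1;
  return -1;
}

int main (void)
{
  struct toy_expr e = { '+', 1, 2 };           /* models t_10 = a_1 + b_2 */
  printf ("%d\n", toy_lookup_avail (&e, 10));  /* miss: -1, recorded */
  printf ("%d\n", toy_lookup_avail (&e, 11));  /* hit: 10, reuse t_10 */
  return 0;
}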
2298 /* Hashing and equality functions for AVAIL_EXPRS. We compute a value number
2299 for expressions using the code of the expression and the SSA numbers of
2300 its operands. */
2302 static hashval_t
2303 avail_expr_hash (const void *p)
2305 gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
2306 const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
2307 tree vuse;
2308 hashval_t val = 0;
2310 val = iterative_hash_hashable_expr (expr, val);
2312 /* If the hash table entry is not associated with a statement, then we
2313 can just hash the expression and not worry about virtual operands
2314 and such. */
2315 if (!stmt)
2316 return val;
2318 /* Add the SSA version number of the vuse operand. This is important
2319 because compound variables like arrays are not renamed in the
2320 operands. Rather, the rename is done on the virtual variable
2321 representing all the elements of the array. */
2322 if ((vuse = gimple_vuse (stmt)))
2323 val = iterative_hash_expr (vuse, val);
2325 return val;
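/* Illustrative, self-contained sketch (not part of this file): the
   hashing scheme above, in miniature.  Operands are mixed into the
   hash iteratively, and for statements with memory operands the VUSE
   version is mixed in as well, so identical loads seen under
   different memory states do not look redundant.  The mixing
   function and constant are hypothetical, not GCC's
   iterative_hash_expr.  */

#include <stdio.h>

static unsigned mix (unsigned v, unsigned seed)
{
  return seed ^ (v + 0x9e3779b9u + (seed << 6) + (seed >> 2));
}

static unsigned toy_avail_expr_hash (unsigned opcode, unsigned op0,
                                     unsigned op1, unsigned vuse_version)
{
  unsigned h = 0;
  h = mix (opcode, h);
  h = mix (op0, h);
  h = mix (op1, h);
  if (vuse_version)          /* 0 models "no statement/no memory ops" */
    h = mix (vuse_version, h);
  return h;
}

int main (void)
{
  /* The same load expression under two memory states hashes
     differently, so the second is not treated as available.  */
  printf ("%u\n", toy_avail_expr_hash (1, 2, 3, 7));
  printf ("%u\n", toy_avail_expr_hash (1, 2, 3, 8));
  return 0;
}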
2328 static hashval_t
2329 real_avail_expr_hash (const void *p)
2331 return ((const struct expr_hash_elt *)p)->hash;
2334 static int
2335 avail_expr_eq (const void *p1, const void *p2)
2337 gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
2338 const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
2339 const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
2340 gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
2341 const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
2342 const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;
2344 /* This case should apply only when removing entries from the table. */
2345 if (stamp1 == stamp2)
2346 return true;
2348 /* FIXME tuples:
2349 We add stmts to a hash table and then modify them. To detect the case
2350 that we modify a stmt and then search for it, we assume that the hash
2351 is always modified by that change.
2352 We have to fully check why this doesn't happen on trunk or rewrite
2353 this in a more reliable (and easier to understand) way. */
2354 if (((const struct expr_hash_elt *)p1)->hash
2355 != ((const struct expr_hash_elt *)p2)->hash)
2356 return false;
2358 /* In case of a collision, both RHS have to be identical and have the
2359 same VUSE operands. */
2360 if (hashable_expr_equal_p (expr1, expr2)
2361 && types_compatible_p (expr1->type, expr2->type))
2363 /* Note that STMT1 and/or STMT2 may be NULL. */
2364 return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
2365 == (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
2368 return false;
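/* Illustrative, self-contained sketch (not part of this file): the
   three-step equality protocol above, in miniature.  Identical stamps
   short-circuit to "equal" (the removal case), differing cached
   hashes short-circuit to "not equal", and only then are the operands
   and memory state compared.  The toy_elt layout is hypothetical.  */

#include <stdio.h>

struct toy_elt
{
  unsigned hash;              /* cached hash value */
  int op, a, b;               /* toy right-hand side */
  unsigned vuse;              /* memory state the RHS was seen under */
  const struct toy_elt *stamp;
};

static int toy_avail_expr_eq (const struct toy_elt *p1,
                              const struct toy_elt *p2)
{
  if (p1->stamp == p2->stamp)
    return 1;                 /* same entry: removing from the table */
  if (p1->hash != p2->hash)
    return 0;                 /* cheap rejection on the cached hash */
  return (p1->op == p2->op && p1->a == p2->a && p1->b == p2->b
          && p1->vuse == p2->vuse);
}

int main (void)
{
  struct toy_elt e1 = { 11, '+', 1, 2, 7, &e1 };
  struct toy_elt e2 = { 11, '+', 1, 2, 7, &e2 };
  printf ("%d\n", toy_avail_expr_eq (&e1, &e2));  /* 1: full compare */
  printf ("%d\n", toy_avail_expr_eq (&e1, &e1));  /* 1: stamp match */
  return 0;
}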
2371 /* PHI-ONLY copy and constant propagation. This pass is meant to clean
2372 up degenerate PHIs created by or exposed by jump threading. */
2374 /* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
2375 NULL. */
2377 tree
2378 degenerate_phi_result (gimple phi)
2380 tree lhs = gimple_phi_result (phi);
2381 tree val = NULL;
2382 size_t i;
2384 /* Ignoring arguments which are the same as LHS, if all the remaining
2385 arguments are the same, then the PHI is a degenerate and has the
2386 value of that common argument. */
2387 for (i = 0; i < gimple_phi_num_args (phi); i++)
2389 tree arg = gimple_phi_arg_def (phi, i);
2391 if (arg == lhs)
2392 continue;
2393 else if (!arg)
2394 break;
2395 else if (!val)
2396 val = arg;
2397 else if (arg == val)
2398 continue;
2399 /* We bring in some of operand_equal_p not only to speed things
2400 up, but also to avoid crashing when dereferencing the type of
2401 a released SSA name. */
2402 else if (TREE_CODE (val) != TREE_CODE (arg)
2403 || TREE_CODE (val) == SSA_NAME
2404 || !operand_equal_p (arg, val, 0))
2405 break;
2407 return (i == gimple_phi_num_args (phi) ? val : NULL);
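/* Illustrative, self-contained sketch (not part of this file): the
   degeneracy test above, in miniature.  A PHI is degenerate when
   every argument is either the result itself or one common value.
   The toy_phi structure is a hypothetical stand-in for a GIMPLE PHI
   node, with 0 playing the role of NULL.  */

#include <stdio.h>

struct toy_phi { int result; int nargs; int args[4]; };

static int toy_degenerate_phi_result (const struct toy_phi *phi)
{
  int val = 0;
  for (int i = 0; i < phi->nargs; i++)
    {
      int arg = phi->args[i];
      if (arg == phi->result)
        continue;             /* arguments equal to the LHS are ignored */
      if (val == 0)
        val = arg;            /* first candidate for the common value */
      else if (arg != val)
        return 0;             /* two distinct values: not degenerate */
    }
  return val;
}

int main (void)
{
  /* Models x_3 = PHI <x_1, x_3, x_1>: just a copy of x_1.  */
  struct toy_phi degen = { 3, 3, { 1, 3, 1 } };
  /* Models x_4 = PHI <x_1, x_2>: a real merge point.  */
  struct toy_phi merge = { 4, 2, { 1, 2 } };
  printf ("%d %d\n", toy_degenerate_phi_result (&degen),
          toy_degenerate_phi_result (&merge));   /* prints "1 0" */
  return 0;
}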
2410 /* Given a statement STMT, which is either a PHI node or an assignment,
2411 remove it from the IL. */
2413 static void
2414 remove_stmt_or_phi (gimple stmt)
2416 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2418 if (gimple_code (stmt) == GIMPLE_PHI)
2419 remove_phi_node (&gsi, true);
2420 else
2422 gsi_remove (&gsi, true);
2423 release_defs (stmt);
2427 /* Given a statement STMT, which is either a PHI node or an assignment,
2428 return the "rhs" of the node; in the case of a non-degenerate
2429 PHI, NULL is returned. */
2431 static tree
2432 get_rhs_or_phi_arg (gimple stmt)
2434 if (gimple_code (stmt) == GIMPLE_PHI)
2435 return degenerate_phi_result (stmt);
2436 else if (gimple_assign_single_p (stmt))
2437 return gimple_assign_rhs1 (stmt);
2438 else
2439 gcc_unreachable ();
2443 /* Given a statement STMT, which is either a PHI node or an assignment,
2444 return the "lhs" of the node. */
2446 static tree
2447 get_lhs_or_phi_result (gimple stmt)
2449 if (gimple_code (stmt) == GIMPLE_PHI)
2450 return gimple_phi_result (stmt);
2451 else if (is_gimple_assign (stmt))
2452 return gimple_assign_lhs (stmt);
2453 else
2454 gcc_unreachable ();
2457 /* Propagate RHS into all uses of LHS (when possible).
2459 RHS and LHS are derived from STMT, which is passed in solely so
2460 that we can remove it if propagation is successful.
2462 When propagating into a PHI node or into a statement which turns
2463 into a trivial copy or constant initialization, set the
2464 appropriate bit in INTERESTING_NAMEs so that we will visit those
2465 nodes as well in an effort to pick up secondary optimization
2466 opportunities. */
2468 static void
2469 propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
2471 /* First verify that propagation is valid and isn't going to move a
2472 loop variant variable outside its loop. */
2473 if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
2474 && (TREE_CODE (rhs) != SSA_NAME
2475 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2476 && may_propagate_copy (lhs, rhs)
2477 && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
2479 use_operand_p use_p;
2480 imm_use_iterator iter;
2481 gimple use_stmt;
2482 bool all = true;
2484 /* Dump details. */
2485 if (dump_file && (dump_flags & TDF_DETAILS))
2487 fprintf (dump_file, " Replacing '");
2488 print_generic_expr (dump_file, lhs, dump_flags);
2489 fprintf (dump_file, "' with %s '",
2490 (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
2491 print_generic_expr (dump_file, rhs, dump_flags);
2492 fprintf (dump_file, "'\n");
2495 /* Walk over every use of LHS and try to replace the use with RHS.
2496 At this point the only reason why such a propagation would not
2497 be successful would be if the use occurs in an ASM_EXPR. */
2498 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2500 /* Leave debug stmts alone. If we succeed in propagating
2501 all non-debug uses, we'll drop the DEF, and propagation
2502 into debug stmts will occur then. */
2503 if (gimple_debug_bind_p (use_stmt))
2504 continue;
2506 /* It's not always safe to propagate into an ASM_EXPR. */
2507 if (gimple_code (use_stmt) == GIMPLE_ASM
2508 && ! may_propagate_copy_into_asm (lhs))
2510 all = false;
2511 continue;
2514 /* Dump details. */
2515 if (dump_file && (dump_flags & TDF_DETAILS))
2517 fprintf (dump_file, " Original statement:");
2518 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2521 /* Propagate the RHS into this use of the LHS. */
2522 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2523 propagate_value (use_p, rhs);
2525 /* Special cases to avoid useless calls into the folding
2526 routines, operand scanning, etc.
2528 First, propagation into a PHI may cause the PHI to become
2529 a degenerate, so mark the PHI as interesting. No other
2530 actions are necessary.
2532 Second, if we're propagating a virtual operand and the
2533 propagation does not change the underlying _DECL node for
2534 the virtual operand, then no further actions are necessary. */
2535 if (gimple_code (use_stmt) == GIMPLE_PHI
2536 || (! is_gimple_reg (lhs)
2537 && TREE_CODE (rhs) == SSA_NAME
2538 && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
2540 /* Dump details. */
2541 if (dump_file && (dump_flags & TDF_DETAILS))
2543 fprintf (dump_file, " Updated statement:");
2544 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2547 /* Propagation into a PHI may expose new degenerate PHIs,
2548 so mark the result of the PHI as interesting. */
2549 if (gimple_code (use_stmt) == GIMPLE_PHI)
2551 tree result = get_lhs_or_phi_result (use_stmt);
2552 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2555 continue;
2558 /* From this point onward we are propagating into a
2559 real statement. Folding may (or may not) be possible,
2560 we may expose new operands, expose dead EH edges,
2561 etc. */
2562 /* NOTE tuples. In the tuples world, fold_stmt_inplace
2563 cannot fold a call that simplifies to a constant,
2564 because the GIMPLE_CALL must be replaced by a
2565 GIMPLE_ASSIGN, and there is no way to effect such a
2566 transformation in-place. We might want to consider
2567 using the more general fold_stmt here. */
2568 fold_stmt_inplace (use_stmt);
2570 /* Sometimes propagation can expose new operands to the
2571 renamer. */
2572 update_stmt (use_stmt);
2574 /* Dump details. */
2575 if (dump_file && (dump_flags & TDF_DETAILS))
2577 fprintf (dump_file, " Updated statement:");
2578 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2581 /* If we replaced a variable index with a constant, then
2582 we would need to update the invariant flag for ADDR_EXPRs. */
2583 if (gimple_assign_single_p (use_stmt)
2584 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
2585 recompute_tree_invariant_for_addr_expr
2586 (gimple_assign_rhs1 (use_stmt));
2588 /* If we cleaned up EH information from the statement,
2589 mark its containing block as needing EH cleanups. */
2590 if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
2592 bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
2593 if (dump_file && (dump_flags & TDF_DETAILS))
2594 fprintf (dump_file, " Flagged to clear EH edges.\n");
2597 /* Propagation may expose new trivial copy/constant propagation
2598 opportunities. */
2599 if (gimple_assign_single_p (use_stmt)
2600 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
2601 && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
2602 || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
2604 tree result = get_lhs_or_phi_result (use_stmt);
2605 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2608 /* Propagation into these nodes may make certain edges in
2609 the CFG unexecutable. We want to identify them, since PHI
2610 nodes at the destination of those unexecutable edges may become
2611 degenerate. */
2612 else if (gimple_code (use_stmt) == GIMPLE_COND
2613 || gimple_code (use_stmt) == GIMPLE_SWITCH
2614 || gimple_code (use_stmt) == GIMPLE_GOTO)
2616 tree val;
2618 if (gimple_code (use_stmt) == GIMPLE_COND)
2619 val = fold_binary_loc (gimple_location (use_stmt),
2620 gimple_cond_code (use_stmt),
2621 boolean_type_node,
2622 gimple_cond_lhs (use_stmt),
2623 gimple_cond_rhs (use_stmt));
2624 else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
2625 val = gimple_switch_index (use_stmt);
2626 else
2627 val = gimple_goto_dest (use_stmt);
2629 if (val && is_gimple_min_invariant (val))
2631 basic_block bb = gimple_bb (use_stmt);
2632 edge te = find_taken_edge (bb, val);
2633 edge_iterator ei;
2634 edge e;
2635 gimple_stmt_iterator gsi, psi;
2637 /* Remove all outgoing edges except TE. */
2638 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
2640 if (e != te)
2642 /* Mark all the PHI nodes at the destination of
2643 the unexecutable edge as interesting. */
2644 for (psi = gsi_start_phis (e->dest);
2645 !gsi_end_p (psi);
2646 gsi_next (&psi))
2648 gimple phi = gsi_stmt (psi);
2650 tree result = gimple_phi_result (phi);
2651 int version = SSA_NAME_VERSION (result);
2653 bitmap_set_bit (interesting_names, version);
2656 te->probability += e->probability;
2658 te->count += e->count;
2659 remove_edge (e);
2660 cfg_altered = true;
2662 else
2663 ei_next (&ei);
2666 gsi = gsi_last_bb (gimple_bb (use_stmt));
2667 gsi_remove (&gsi, true);
2669 /* And fixup the flags on the single remaining edge. */
2670 te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
2671 te->flags &= ~EDGE_ABNORMAL;
2672 te->flags |= EDGE_FALLTHRU;
2673 if (te->probability > REG_BR_PROB_BASE)
2674 te->probability = REG_BR_PROB_BASE;
2679 /* Ensure there is nothing else to do. */
2680 gcc_assert (!all || has_zero_uses (lhs));
2682 /* If we were able to propagate away all uses of LHS, then
2683 we can remove STMT. */
2684 if (all)
2685 remove_stmt_or_phi (stmt);
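/* Illustrative, self-contained sketch (not part of this file): the
   immediate-use walk above, in miniature.  Every recorded use site of
   LHS is rewritten to RHS; a use we must not touch (modeled on the
   ASM_EXPR case) clears ALL so the caller knows the definition is
   still live.  The fixed-size use list is a hypothetical stand-in for
   GCC's immediate-use chains.  */

#include <stdio.h>

#define MAX_USES 8
struct toy_uses { int count; int *site[MAX_USES]; };

/* Returns nonzero when every use was replaced, i.e. the definition
   of LHS has become dead and may be removed.  */
static int toy_propagate_into_uses (struct toy_uses *u, int rhs)
{
  int all = 1;
  for (int i = 0; i < u->count; i++)
    {
      if (u->site[i] == 0)
        {
          all = 0;            /* an untouchable use, e.g. in an asm */
          continue;
        }
      *u->site[i] = rhs;      /* models propagate_value (use_p, rhs) */
    }
  return all;
}

int main (void)
{
  int op1 = 5, op2 = 5;
  struct toy_uses u = { 3, { &op1, &op2, 0 } };
  int all = toy_propagate_into_uses (&u, 9);
  printf ("op1=%d op2=%d all=%d\n", op1, op2, all);  /* op1=9 op2=9 all=0 */
  return 0;
}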
2689 /* STMT is either a PHI node (potentially a degenerate PHI node) or
2690 a statement that is a trivial copy or constant initialization.
2692 Attempt to eliminate STMT by propagating its RHS into all uses of
2693 its LHS. This may in turn set new bits in INTERESTING_NAMES
2694 for nodes we want to revisit later.
2696 All exit paths should clear INTERESTING_NAMES for the result
2697 of STMT. */
2699 static void
2700 eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
2702 tree lhs = get_lhs_or_phi_result (stmt);
2703 tree rhs;
2704 int version = SSA_NAME_VERSION (lhs);
2706 /* If the LHS of this statement or PHI has no uses, then we can
2707 just eliminate it. This can occur if, for example, the PHI
2708 was created by block duplication due to threading and its only
2709 use was in the conditional at the end of the block which was
2710 deleted. */
2711 if (has_zero_uses (lhs))
2713 bitmap_clear_bit (interesting_names, version);
2714 remove_stmt_or_phi (stmt);
2715 return;
2718 /* Get the RHS of the assignment or PHI node if the PHI is a
2719 degenerate. */
2720 rhs = get_rhs_or_phi_arg (stmt);
2721 if (!rhs)
2723 bitmap_clear_bit (interesting_names, version);
2724 return;
2727 propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
2729 /* Note that STMT may well have been deleted by now, so do
2730 not access it; instead use the saved version number to clear
2731 STMT's entry in the worklist. */
2732 bitmap_clear_bit (interesting_names, version);
2735 /* The first phase in degenerate PHI elimination.
2737 Eliminate the degenerate PHIs in BB, then recurse on the
2738 dominator children of BB. */
2740 static void
2741 eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
2743 gimple_stmt_iterator gsi;
2744 basic_block son;
2746 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2748 gimple phi = gsi_stmt (gsi);
2750 eliminate_const_or_copy (phi, interesting_names);
2753 /* Recurse into the dominator children of BB. */
2754 for (son = first_dom_son (CDI_DOMINATORS, bb);
2755 son;
2756 son = next_dom_son (CDI_DOMINATORS, son))
2757 eliminate_degenerate_phis_1 (son, interesting_names);
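/* Illustrative, self-contained sketch (not part of this file): the
   recursive dominator walk above, in miniature.  Each block is
   processed before its dominator children, mirroring the
   first_dom_son/next_dom_son iteration.  The toy_block tree is
   hypothetical.  */

#include <stdio.h>

struct toy_block
{
  int index;
  struct toy_block *first_child;    /* first dominator child */
  struct toy_block *next_sibling;   /* next child of the same parent */
};

static void toy_walk (struct toy_block *bb)
{
  printf ("visiting block %d\n", bb->index);  /* PHIs handled here */
  for (struct toy_block *son = bb->first_child; son;
       son = son->next_sibling)
    toy_walk (son);
}

int main (void)
{
  struct toy_block b3 = { 3, 0, 0 };
  struct toy_block b2 = { 2, 0, 0 };
  struct toy_block b1 = { 1, &b3, &b2 };
  struct toy_block b0 = { 0, &b1, 0 };
  toy_walk (&b0);    /* visits blocks 0, 1, 3, 2 */
  return 0;
}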
2761 /* A very simple pass to eliminate degenerate PHI nodes from the
2762 IL. This is meant to be fast enough to be able to be run several
2763 times in the optimization pipeline.
2765 Certain optimizations, particularly those which duplicate blocks
2766 or remove edges from the CFG can create or expose PHIs which are
2767 trivial copies or constant initializations.
2769 While we could pick up these optimizations in DOM or with the
2770 combination of copy-prop and CCP, those solutions are far too
2771 heavy-weight for our needs.
2773 This implementation has two phases so that we can efficiently
2774 eliminate the first order degenerate PHIs and second order
2775 degenerate PHIs.
2777 The first phase performs a dominator walk to identify and eliminate
2778 the vast majority of the degenerate PHIs. When a degenerate PHI
2779 is identified and eliminated, any affected statements or PHIs
2780 are put on a worklist.
2782 The second phase eliminates degenerate PHIs and trivial copies
2783 or constant initializations using the worklist. This is how we
2784 pick up the secondary optimization opportunities with minimal
2785 cost. */
2787 static unsigned int
2788 eliminate_degenerate_phis (void)
2790 bitmap interesting_names;
2791 bitmap interesting_names1;
2793 /* Bitmap of blocks which need EH information updated. We cannot
2794 update it on-the-fly as doing so invalidates the dominator tree. */
2795 need_eh_cleanup = BITMAP_ALLOC (NULL);
2797 /* INTERESTING_NAMES is effectively our worklist, indexed by
2798 SSA_NAME_VERSION.
2800 A set bit indicates that the statement or PHI node which
2801 defines the SSA_NAME should be (re)examined to determine if
2802 it has become a degenerate PHI or trivial const/copy propagation
2803 opportunity.
2805 Experiments have shown we generally get better compilation
2806 time behavior with bitmaps rather than sbitmaps. */
2807 interesting_names = BITMAP_ALLOC (NULL);
2808 interesting_names1 = BITMAP_ALLOC (NULL);
2810 calculate_dominance_info (CDI_DOMINATORS);
2811 cfg_altered = false;
2813 /* First phase. Eliminate degenerate PHIs via a dominator
2814 walk of the CFG.
2816 Experiments have indicated that we generally get better
2817 compile-time behavior by visiting blocks in the first
2818 phase in dominator order. Presumably this is because walking
2819 in dominator order leaves fewer PHIs for later examination
2820 by the worklist phase. */
2821 eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
2823 /* Second phase. Eliminate second order degenerate PHIs as well
2824 as trivial copies or constant initializations identified by
2825 the first phase or this phase. Basically we keep iterating
2826 until our set of INTERESTING_NAMEs is empty. */
2827 while (!bitmap_empty_p (interesting_names))
2829 unsigned int i;
2830 bitmap_iterator bi;
2832 /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
2833 changed during the loop. Copy it to another bitmap and
2834 use that. */
2835 bitmap_copy (interesting_names1, interesting_names);
2837 EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
2839 tree name = ssa_name (i);
2841 /* Ignore SSA_NAMEs that have been released because
2842 their defining statement was deleted (unreachable). */
2843 if (name)
2844 eliminate_const_or_copy (SSA_NAME_DEF_STMT (name),
2845 interesting_names);
2849 if (cfg_altered)
2850 free_dominance_info (CDI_DOMINATORS);
2852 /* Propagation of const and copies may make some EH edges dead. Purge
2853 such edges from the CFG as needed. */
2854 if (!bitmap_empty_p (need_eh_cleanup))
2856 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
2857 BITMAP_FREE (need_eh_cleanup);
2860 BITMAP_FREE (interesting_names);
2861 BITMAP_FREE (interesting_names1);
2862 return 0;
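/* Illustrative, self-contained sketch (not part of this file): the
   two-phase driver above, in miniature.  Phase one seeds a worklist;
   phase two drains snapshots of it until no name is left, letting
   each elimination enqueue the opportunities it exposes.  The array
   worklist and the toy_process rule are hypothetical.  */

#include <stdio.h>

#define NUM_NAMES 16
static int interesting[NUM_NAMES];   /* models the version bitmap */

/* Models eliminate_const_or_copy: pretend that handling name N
   exposes name N/2 as a new opportunity.  */
static void toy_process (int n)
{
  printf ("processing name %d\n", n);
  if (n / 2 > 0)
    interesting[n / 2] = 1;
}

int main (void)
{
  int changed = 1;
  interesting[12] = 1;               /* phase one would seed this */

  while (changed)                    /* phase two: iterate to empty */
    {
      int snapshot[NUM_NAMES];
      changed = 0;
      /* Work from a copy, since the worklist changes while we scan
         (the EXECUTE_IF_SET_IN_BITMAP restriction above).  */
      for (int i = 0; i < NUM_NAMES; i++)
        {
          snapshot[i] = interesting[i];
          interesting[i] = 0;
        }
      for (int i = 0; i < NUM_NAMES; i++)
        if (snapshot[i])
          {
            toy_process (i);         /* handles 12, then 6, 3, 1 */
            changed = 1;
          }
    }
  return 0;
}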
2865 struct gimple_opt_pass pass_phi_only_cprop =
2868 GIMPLE_PASS,
2869 "phicprop", /* name */
2870 gate_dominator, /* gate */
2871 eliminate_degenerate_phis, /* execute */
2872 NULL, /* sub */
2873 NULL, /* next */
2874 0, /* static_pass_number */
2875 TV_TREE_PHI_CPROP, /* tv_id */
2876 PROP_cfg | PROP_ssa, /* properties_required */
2877 0, /* properties_provided */
2878 0, /* properties_destroyed */
2879 0, /* todo_flags_start */
2880 TODO_cleanup_cfg
2881 | TODO_dump_func
2882 | TODO_ggc_collect
2883 | TODO_verify_ssa
2884 | TODO_verify_stmts
2885 | TODO_update_ssa /* todo_flags_finish */