/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "tm_p.h"
29 #include "basic-block.h"
30 #include "cfgloop.h"
31 #include "output.h"
32 #include "function.h"
33 #include "tree-pretty-print.h"
34 #include "gimple-pretty-print.h"
35 #include "timevar.h"
36 #include "tree-dump.h"
37 #include "tree-flow.h"
38 #include "domwalk.h"
39 #include "tree-pass.h"
40 #include "tree-ssa-propagate.h"
41 #include "langhooks.h"
42 #include "params.h"
/* This file implements optimizations on the dominator tree.  */

/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */

enum expr_kind
  {
    EXPR_SINGLE,
    EXPR_UNARY,
    EXPR_BINARY,
    EXPR_CALL
  };

struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0; tree opnd1; } binary;
    struct { tree fn; bool pure; size_t nargs; tree *args; } call;
  } ops;
};

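/* For example, a statement such as "x_1 = a_2 + b_3" would be recorded
   (roughly) as a hashable_expr with kind EXPR_BINARY, type equal to the
   type of x_1, op PLUS_EXPR and operands a_2 and b_3; the SSA names used
   here are purely illustrative.  */
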
/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

struct cond_equivalence
{
  struct hashable_expr cond;
  tree value;
};

/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  The number of recorded conditions can vary, but
     can be determined by the condition's code.  So we have an array
     and its maximum index rather than use a varray.  */
  struct cond_equivalence *cond_equivalences;
  unsigned int max_cond_equivalences;
};

/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly, as we pass through
   conditionals we record the conditional itself as having either a true
   or false value in this table.  */
static htab_t avail_exprs;

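/* As an illustrative example, given the GIMPLE sequence

     a_1 = b_2 + c_3;
     ...
     d_4 = b_2 + c_3;

   the first statement makes "b_2 + c_3" available as a_1, so the second
   statement can be rewritten as "d_4 = a_1;" (names are illustrative).  */
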
/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;
DEF_VEC_P(expr_hash_elt_t);
DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);

static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;

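/* For instance (illustrative), on entry to a block we push a NULL marker
   followed by the elements for "b_2 + c_3" and "x_5 < y_6" as they are
   made available; when leaving the block we pop and remove entries until
   the NULL marker is reached, restoring the table to the state it had in
   the dominating blocks.  */
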
/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};

/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;

/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (struct cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_leave_block (struct dom_walk_data *, basic_block);
static void dom_opt_enter_block (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);

/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      expr->type = NULL_TREE;

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn = gimple_call_fn (stmt);

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = (tree *) xcalloc (nargs, sizeof (tree));
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}

/* Compare two hashable_expr structures for equivalence.
   They are considered equivalent when the expressions
   they denote must necessarily be equal.  The logic is intended
   to follow that of operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If exactly one of the types is NULL, the expressions cannot
     be equal.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the types do not have the same signedness, precision, and mode,
     then we cannot consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (! operand_equal_p (expr0->ops.call.fn,
                               expr1->ops.call.fn, 0))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}

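/* Note that for commutative operators the two operand orders are treated
   as the same expression; e.g. (illustrative) "a_1 + b_2" and "b_2 + a_1"
   compare equal here, so the hash function below must likewise be
   insensitive to operand order for such codes.  */
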
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */

static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;

        val = iterative_hash_object (code, val);
        val = iterative_hash_expr (expr->ops.call.fn, val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}

/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;

        print_generic_expr (stream, element->expr.ops.call.fn, 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, " ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}

/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);

  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);

  free (element);
}

/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}

/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              if (edge_info->cond_equivalences)
                free (edge_info->cond_equivalences);
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}

/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq,
                             free_expr_hash_elt);
  avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Set up callbacks for the generic dominator tree walker.  */
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dom_opt_enter_block;
  walk_data.after_dom_children = dom_opt_leave_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body,
     assuming that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (single_succ_p (bb)
              && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            {
              bitmap_clear_bit (need_eh_cleanup, i);
              bitmap_set_bit (need_eh_cleanup, single_succ (bb)->index);
            }
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);

  /* Free the value-handle array.  */
  threadedge_finalize_values ();
  ssa_name_values = NULL;

  return 0;
}

static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}

struct gimple_opt_pass pass_dominator =
{
 {
  GIMPLE_PASS,
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_cleanup_cfg
    | TODO_verify_ssa                   /* todo_flags_finish */
 }
};

/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */

static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}

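/* For example (illustrative), "if (5 > x_1)" would be canonicalized into
   "if (x_1 < 5)", so that later lookups of the same predicate find a
   single canonical form regardless of how the source was written.  */
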
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Remove expressions from the AVAIL_EXPRS table until we hit the
   marker (NULL) that was pushed when the current block was entered.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
    {
      expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);
      void **slot;

      if (victim == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, victim);
        }

      slot = htab_find_slot_with_hash (avail_exprs,
                                       victim, victim->hash, NO_INSERT);
      gcc_assert (slot && *slot == (void *) victim);
      htab_clear_slot (avail_exprs, slot);
    }
}

/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = VEC_pop (tree, const_and_copies_stack);
      set_ssa_name_value (dest, prev_value);
    }
}

/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}

/* Wrapper for common code to attempt to thread an edge.  In particular,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

static void
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  if (! walk_data->global_data)
    {
      gimple dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);
      walk_data->global_data = dummy_cond;
    }

  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
}

/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */

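/* For example (illustrative), the PHI node

     a_1 = PHI <b_2, b_2, a_1>

   has all alternatives equal to b_2 once the self-reference a_1 is
   ignored, so a_1 can be treated as equivalent to b_2.  */
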
static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}

/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}

/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          struct cond_equivalence *cond_equivalences
            = edge_info->cond_equivalences;

          if (lhs)
            record_equality (lhs, rhs);

          if (cond_equivalences)
            for (i = 0; i < edge_info->max_cond_equivalences; i++)
              record_cond (&cond_equivalences[i]);
        }
    }
}

/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements: %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, " avail_exprs: ");
  htab_statistics (file, avail_exprs);
}


/* Dump SSA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}


/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}

/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (struct cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
    }
  else
    free (element);
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           struct cond_equivalence *p)
{
  struct hashable_expr *cond = &p->cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  p->value = boolean_true_node;
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

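/* More concretely (integer operands, illustrative): when "a_1 < b_2" is
   true we also record "a_1 <= b_2" and "a_1 != b_2" as true; for
   floating-point operands additional facts such as "ORDERED (a_1, b_2)"
   and the LTGT_EXPR form "a_1 <> b_2" are recorded by the cases below.  */
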
static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 6;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 6);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[5]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 3;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 3);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[2]);
        }
      else
        {
          edge_info->max_cond_equivalences = 2;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 5;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 5);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNORDERED_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 8);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[5]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[7]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNEQ_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case LTGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    default:
      edge_info->max_cond_equivalences = 2;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &edge_info->cond_equivalences[0].cond);
  edge_info->cond_equivalences[0].value = boolean_true_node;

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &edge_info->cond_equivalences[1].cond);
  edge_info->cond_equivalences[1].value = boolean_false_node;
}

/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  set_ssa_name_value (x, y);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}

/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}

/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}

/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}

/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

static bool
simple_iv_increment_p (gimple stmt)
{
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  if (gimple_assign_rhs_code (stmt) != PLUS_EXPR
      && gimple_assign_rhs_code (stmt) != MINUS_EXPR)
    return false;

  preinc = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}

/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */

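/* For example (illustrative), if CONST_AND_COPIES records x_1 = 7 and a
   successor of BB contains "y_2 = PHI <x_1 (BB), ...>", the alternative
   associated with the BB->successor edge is replaced, making the PHI
   "y_2 = PHI <7, ...>".  */
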
static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }
    }
}

/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert_loc (loc, TREE_TYPE (index),
                                                 CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (code == NE_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }
          else if (TREE_CODE (op0) == SSA_NAME
                   && (is_gimple_min_invariant (op1)
                       || TREE_CODE (op1) == SSA_NAME))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (TREE_CODE (cond) == NE_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }
        }

      /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
    }
}

static void
dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                     basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}

/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
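  /* For instance (illustrative), if the successor block starts with
     "if (x_1 == 0)" and traversing the edge from BB establishes x_1 = 0,
     the conditional is statically resolvable and the edge can be threaded
     past it directly to the TRUE destination.  */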
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = (struct edge_info *) true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences
                = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          dom_thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
          restore_vars_to_original_value ();
        }

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
          edge_info = (struct edge_info *) false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences
                = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          /* Now thread the edge.  */
          dom_thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();
}

/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.  */

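/* For example (illustrative), if a dominating block established that
   "a_2 < b_3" is true, a later "if (a_2 < b_3)" here finds that value in
   the hash table and has the known result propagated into it, after which
   CFG cleanup can fold the branch away.  */
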
static void
eliminate_redundant_computations (gimple_stmt_iterator *gsi)
{
  tree expr_type;
  tree cached_lhs;
  bool insert = true;
  bool assigns_var_p = false;

  gimple stmt = gsi_stmt (*gsi);

  tree def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but cannot
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || gimple_vdef (stmt)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    expr_type = TREE_TYPE (gimple_switch_index (stmt));
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
    {
#if defined ENABLE_CHECKING
      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
                  || is_gimple_min_invariant (cached_lhs));
#endif

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, " Replaced redundant expr '");
          print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

      if (assigns_var_p
          && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
    }
}

/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.  */
/* We handle only very simple copy equivalences here.  The heavy
   lifting is done by eliminate_redundant_computations.  */

static void
record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
{
  tree lhs;
  enum tree_code lhs_code;

  gcc_assert (is_gimple_assign (stmt));

  lhs = gimple_assign_lhs (stmt);
  lhs_code = TREE_CODE (lhs);

  if (lhs_code == SSA_NAME
      && gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      /* If the RHS of the assignment is a constant or another variable that
         may be propagated, register it in the CONST_AND_COPIES table.  We
         do not need to record unwind data for this, since this is a true
         assignment and not an equivalence inferred from a comparison.  All
         uses of this ssa name are dominated by this assignment, so unwinding
         just costs time and space.  */
      if (may_optimize_p
          && (TREE_CODE (rhs) == SSA_NAME
              || is_gimple_min_invariant (rhs)))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "==== ASGN ");
              print_generic_expr (dump_file, lhs, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, rhs, 0);
              fprintf (dump_file, "\n");
            }

          set_ssa_name_value (lhs, rhs);
        }
    }

  /* A memory store, even an aliased store, creates a useful
     equivalence.  By exchanging the LHS and RHS, creating suitable
     vops and recording the result in the available expression table,
     we may be able to expose more redundant loads.  */
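  /* For example (illustrative), after the store "*p_1 = x_2;" we record
     the artificial statement "x_2 = *p_1;" as an available expression, so
     a subsequent load "y_3 = *p_1;" with the same virtual operands can be
     rewritten as "y_3 = x_2;".  */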
  if (!gimple_has_volatile_ops (stmt)
      && gimple_references_memory_p (stmt)
      && gimple_assign_single_p (stmt)
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
      && !is_gimple_reg (lhs))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      gimple new_stmt;

      /* Build a new statement with the RHS and LHS exchanged.  */
      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* NOTE tuples.  The call to gimple_build_assign below replaced
             a call to build_gimple_modify_stmt, which did not set the
             SSA_NAME_DEF_STMT on the LHS of the assignment.  Doing so
             may cause an SSA validation failure, as the LHS may be a
             default-initialized name and should have no definition.  I'm
             a bit dubious of this, as the artificial statement that we
             generate here may in fact be ill-formed, but it is simply
             used as an internal device in this pass, and never becomes
             part of the CFG.  */
          gimple defstmt = SSA_NAME_DEF_STMT (rhs);
          new_stmt = gimple_build_assign (rhs, lhs);
          SSA_NAME_DEF_STMT (rhs) = defstmt;
        }
      else
        new_stmt = gimple_build_assign (rhs, lhs);

      gimple_set_vuse (new_stmt, gimple_vdef (stmt));

      /* Finally enter the statement into the available expression
         table.  */
      lookup_avail_expr (new_stmt, true);
    }
}

1946 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
1947 CONST_AND_COPIES. */
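/* An illustrative example (hypothetical names): if CONST_AND_COPIES
maps x_1 to the constant 5, the use of x_1 in

y_2 = x_1 + 1;

is replaced to yield "y_2 = 5 + 1;", which the folding done later in
optimize_stmt reduces to "y_2 = 6;".  */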
1949 static void
1950 cprop_operand (gimple stmt, use_operand_p op_p)
1952 tree val;
1953 tree op = USE_FROM_PTR (op_p);
1955 /* If the operand has a known constant value or it is known to be a
1956 copy of some other variable, use the value or copy stored in
1957 CONST_AND_COPIES. */
1958 val = SSA_NAME_VALUE (op);
1959 if (val && val != op)
1961 /* Do not change the base variable in the virtual operand
1962 tables. That would make it impossible to reconstruct
1963 the renamed virtual operand if we later modify this
1964 statement. Also only allow the new value to be an SSA_NAME
1965 for propagation into virtual operands. */
1966 if (!is_gimple_reg (op)
1967 && (TREE_CODE (val) != SSA_NAME
1968 || is_gimple_reg (val)
1969 || get_virtual_var (val) != get_virtual_var (op)))
1970 return;
1972 /* Do not replace hard register operands in asm statements. */
1973 if (gimple_code (stmt) == GIMPLE_ASM
1974 && !may_propagate_copy_into_asm (op))
1975 return;
1977 /* Certain operands are not allowed to be copy propagated due
1978 to their interaction with exception handling and some GCC
1979 extensions. */
1980 if (!may_propagate_copy (op, val))
1981 return;
1983 /* Do not propagate addresses that point to volatiles into memory
1984 stmts without volatile operands. */
1985 if (POINTER_TYPE_P (TREE_TYPE (val))
1986 && TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (val)))
1987 && gimple_has_mem_ops (stmt)
1988 && !gimple_has_volatile_ops (stmt))
1989 return;
1991 /* Do not propagate copies if the propagated value is at a deeper loop
1992 depth than the propagatee. Otherwise, this may move loop variant
1993 variables outside of their loops and prevent coalescing
1994 opportunities. If the value was loop invariant, it will be hoisted
1995 by LICM and exposed for copy propagation. */
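/* An illustrative case (hypothetical names): if x_3 is recorded as a
copy of y_4, y_4 is defined inside a loop and the use of x_3 sits
outside that loop, then replacing x_3 with y_4 would extend y_4's
live range beyond its loop and could block coalescing, so we skip
the propagation.  */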
1996 if (loop_depth_of_name (val) > loop_depth_of_name (op))
1997 return;
1999 /* Do not propagate copies into simple IV increment statements.
2000 See PR23821 for how this can disturb IV analysis. */
2001 if (TREE_CODE (val) != INTEGER_CST
2002 && simple_iv_increment_p (stmt))
2003 return;
2005 /* Dump details. */
2006 if (dump_file && (dump_flags & TDF_DETAILS))
2008 fprintf (dump_file, " Replaced '");
2009 print_generic_expr (dump_file, op, dump_flags);
2010 fprintf (dump_file, "' with %s '",
2011 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2012 print_generic_expr (dump_file, val, dump_flags);
2013 fprintf (dump_file, "'\n");
2016 if (TREE_CODE (val) != SSA_NAME)
2017 opt_stats.num_const_prop++;
2018 else
2019 opt_stats.num_copy_prop++;
2021 propagate_value (op_p, val);
2023 /* And note that we modified this statement. This is now
2024 safe, even if we changed virtual operands since we will
2025 rescan the statement and rewrite its operands again. */
2026 gimple_set_modified (stmt, true);
2030 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2031 known value for that SSA_NAME (or NULL if no value is known).
2033 Propagate values from CONST_AND_COPIES into the uses, vuses and
2034 vdef_ops of STMT. */
2036 static void
2037 cprop_into_stmt (gimple stmt)
2039 use_operand_p op_p;
2040 ssa_op_iter iter;
2042 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2044 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2045 cprop_operand (stmt, op_p);
2049 /* Optimize the statement pointed to by iterator SI.
2051 We try to perform some simplistic global redundancy elimination and
2052 constant propagation:
2054 1- To detect global redundancy, we keep track of expressions that have
2055 been computed in this block and its dominators. If we find that the
2056 same expression is computed more than once, we eliminate repeated
2057 computations by using the target of the first one.
2059 2- Constant values and copy assignments. This is used to do very
2060 simplistic constant and copy propagation. When a constant or copy
2061 assignment is found, we map the value on the RHS of the assignment to
2062 the variable in the LHS in the CONST_AND_COPIES table. */
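/* An illustrative example of both transformations (hypothetical
names):

x_3 = a_1 + b_2;
...
y_4 = a_1 + b_2;

the second computation is redundant and becomes "y_4 = x_3;"; that
copy in turn maps y_4 to x_3 in CONST_AND_COPIES so dominated uses
of y_4 can be rewritten to x_3.  */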
2064 static void
2065 optimize_stmt (basic_block bb, gimple_stmt_iterator si)
2067 gimple stmt, old_stmt;
2068 bool may_optimize_p;
2069 bool modified_p = false;
2071 old_stmt = stmt = gsi_stmt (si);
2073 if (gimple_code (stmt) == GIMPLE_COND)
2074 canonicalize_comparison (stmt);
2076 update_stmt_if_modified (stmt);
2077 opt_stats.num_stmts++;
2079 if (dump_file && (dump_flags & TDF_DETAILS))
2081 fprintf (dump_file, "Optimizing statement ");
2082 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2085 /* Const/copy propagate into USES, VUSES and the RHS of VDEFs. */
2086 cprop_into_stmt (stmt);
2088 /* If the statement has been modified with constant replacements,
2089 fold its RHS before checking for redundant computations. */
2090 if (gimple_modified_p (stmt))
2092 tree rhs = NULL;
2094 /* Try to fold the statement making sure that STMT is kept
2095 up to date. */
2096 if (fold_stmt (&si))
2098 stmt = gsi_stmt (si);
2099 gimple_set_modified (stmt, true);
2101 if (dump_file && (dump_flags & TDF_DETAILS))
2103 fprintf (dump_file, " Folded to: ");
2104 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2108 /* We only need to consider cases that can yield a gimple operand. */
2109 if (gimple_assign_single_p (stmt))
2110 rhs = gimple_assign_rhs1 (stmt);
2111 else if (gimple_code (stmt) == GIMPLE_GOTO)
2112 rhs = gimple_goto_dest (stmt);
2113 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2114 /* This should never be an ADDR_EXPR. */
2115 rhs = gimple_switch_index (stmt);
2117 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2118 recompute_tree_invariant_for_addr_expr (rhs);
2120 /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
2121 even if fold_stmt updated the stmt already and thus cleared
2122 gimple_modified_p flag on it. */
2123 modified_p = true;
2126 /* Check for redundant computations. Do this optimization only
2127 for assignments and conditionals that have no volatile ops. */
2128 may_optimize_p = (!gimple_has_volatile_ops (stmt)
2129 && ((is_gimple_assign (stmt)
2130 && !gimple_rhs_has_side_effects (stmt))
2131 || (is_gimple_call (stmt)
2132 && gimple_call_lhs (stmt) != NULL_TREE
2133 && !gimple_rhs_has_side_effects (stmt))
2134 || gimple_code (stmt) == GIMPLE_COND
2135 || gimple_code (stmt) == GIMPLE_SWITCH));
2137 if (may_optimize_p)
2139 if (gimple_code (stmt) == GIMPLE_CALL)
2141 /* Resolve __builtin_constant_p. If it hasn't been
2142 folded to integer_one_node by now, it's fairly
2143 certain that the value simply isn't constant. */
2144 tree callee = gimple_call_fndecl (stmt);
2145 if (callee
2146 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2147 && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
2149 propagate_tree_value_into_stmt (&si, integer_zero_node);
2150 stmt = gsi_stmt (si);
2154 update_stmt_if_modified (stmt);
2155 eliminate_redundant_computations (&si);
2156 stmt = gsi_stmt (si);
2159 /* Record any additional equivalences created by this statement. */
2160 if (is_gimple_assign (stmt))
2161 record_equivalences_from_stmt (stmt, may_optimize_p);
2163 /* If STMT is a COND_EXPR and it was modified, then we may know
2164 where it goes. If that is the case, then mark the CFG as altered.
2166 This will cause us to later call remove_unreachable_blocks and
2167 cleanup_tree_cfg when it is safe to do so. It is not safe to
2168 clean things up here since removal of edges and such can trigger
2169 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2170 the manager.
2172 That's all fine and good, except that once SSA_NAMEs are released
2173 to the manager, we must not call create_ssa_name until all references
2174 to released SSA_NAMEs have been eliminated.
2176 All references to the deleted SSA_NAMEs cannot be eliminated until
2177 we remove unreachable blocks.
2179 We cannot remove unreachable blocks until after we have completed
2180 any queued jump threading.
2182 We cannot complete any queued jump threads until we have taken
2183 appropriate variables out of SSA form. Taking variables out of
2184 SSA form can call create_ssa_name and thus we lose.
2186 Ultimately I suspect we're going to need to change the interface
2187 into the SSA_NAME manager. */
2188 if (gimple_modified_p (stmt) || modified_p)
2190 tree val = NULL;
2192 update_stmt_if_modified (stmt);
2194 if (gimple_code (stmt) == GIMPLE_COND)
2195 val = fold_binary_loc (gimple_location (stmt),
2196 gimple_cond_code (stmt), boolean_type_node,
2197 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
2198 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2199 val = gimple_switch_index (stmt);
2201 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2202 cfg_altered = true;
2204 /* If we simplified a statement in such a way that it can be shown
2205 not to trap, update the EH information and the CFG to match. */
2206 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
2208 bitmap_set_bit (need_eh_cleanup, bb->index);
2209 if (dump_file && (dump_flags & TDF_DETAILS))
2210 fprintf (dump_file, " Flagged to clear EH edges.\n");
2215 /* Search for an existing instance of STMT in the AVAIL_EXPRS table.
2216 If found, return its LHS. Otherwise insert STMT in the table and
2217 return NULL_TREE.
2219 Also, when an expression is first inserted in the table, it is
2220 also added to AVAIL_EXPRS_STACK, so that it can be removed when
2221 we finish processing this block and its children. */
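/* An illustrative usage sketch (hypothetical names): the first
occurrence of "t_5 = a_1 + b_2" is entered into the table and
NULL_TREE is returned; a dominated occurrence "t_9 = a_1 + b_2" then
finds that entry and gets t_5 back as its replacement.  */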
2223 static tree
2224 lookup_avail_expr (gimple stmt, bool insert)
2226 void **slot;
2227 tree lhs;
2228 tree temp;
2229 struct expr_hash_elt element;
2231 /* Get LHS of assignment or call, else NULL_TREE. */
2232 lhs = gimple_get_lhs (stmt);
2234 initialize_hash_element (stmt, lhs, &element);
2236 if (dump_file && (dump_flags & TDF_DETAILS))
2238 fprintf (dump_file, "LKUP ");
2239 print_expr_hash_elt (dump_file, &element);
2242 /* Don't bother remembering constant assignments and copy operations.
2243 Constants and copy operations are handled by the constant/copy propagator
2244 in optimize_stmt. */
2245 if (element.expr.kind == EXPR_SINGLE
2246 && (TREE_CODE (element.expr.ops.single.rhs) == SSA_NAME
2247 || is_gimple_min_invariant (element.expr.ops.single.rhs)))
2248 return NULL_TREE;
2250 /* Finally try to find the expression in the main expression hash table. */
2251 slot = htab_find_slot_with_hash (avail_exprs, &element, element.hash,
2252 (insert ? INSERT : NO_INSERT));
2253 if (slot == NULL)
2254 return NULL_TREE;
2256 if (*slot == NULL)
2258 struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
2259 *element2 = element;
2260 element2->stamp = element2;
2261 *slot = (void *) element2;
2263 if (dump_file && (dump_flags & TDF_DETAILS))
2265 fprintf (dump_file, "2>>> ");
2266 print_expr_hash_elt (dump_file, element2);
2269 VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element2);
2270 return NULL_TREE;
2273 /* Extract the LHS of the assignment so that it can be used as the current
2274 definition of another variable. */
2275 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2277 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2278 use the value from the const_and_copies table. */
2279 if (TREE_CODE (lhs) == SSA_NAME)
2281 temp = SSA_NAME_VALUE (lhs);
2282 if (temp)
2283 lhs = temp;
2286 if (dump_file && (dump_flags & TDF_DETAILS))
2288 fprintf (dump_file, "FIND: ");
2289 print_generic_expr (dump_file, lhs, 0);
2290 fprintf (dump_file, "\n");
2293 return lhs;
2296 /* Hashing and equality functions for AVAIL_EXPRS. We compute a value number
2297 for expressions using the code of the expression and the SSA numbers of
2298 its operands. */
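/* An illustrative sketch (hypothetical names): for "t_9 = a_1 + b_2"
the hash mixes the expression kind and code (EXPR_BINARY, PLUS_EXPR)
with its operands, and then with the statement's VUSE, if any, so
that loads separated by a clobbering store hash to different
values.  */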
2300 static hashval_t
2301 avail_expr_hash (const void *p)
2303 gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
2304 const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
2305 tree vuse;
2306 hashval_t val = 0;
2308 val = iterative_hash_hashable_expr (expr, val);
2310 /* If the hash table entry is not associated with a statement, then we
2311 can just hash the expression and not worry about virtual operands
2312 and such. */
2313 if (!stmt)
2314 return val;
2316 /* Add the SSA version numbers of the vuse operand. This is important
2317 because compound variables like arrays are not renamed in the
2318 operands. Rather, the rename is done on the virtual variable
2319 representing all the elements of the array. */
2320 if ((vuse = gimple_vuse (stmt)))
2321 val = iterative_hash_expr (vuse, val);
2323 return val;
2326 static hashval_t
2327 real_avail_expr_hash (const void *p)
2329 return ((const struct expr_hash_elt *)p)->hash;
2332 static int
2333 avail_expr_eq (const void *p1, const void *p2)
2335 gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
2336 const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
2337 const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
2338 gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
2339 const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
2340 const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;
2342 /* This case should apply only when removing entries from the table. */
2343 if (stamp1 == stamp2)
2344 return true;
2346 /* FIXME tuples:
2347 We add stmts to a hash table and then modify them. To detect the case
2348 that we modify a stmt and then search for it, we assume that the hash
2349 is always modified by that change.
2350 We have to fully check why this doesn't happen on trunk or rewrite
2351 this in a more reliable (and easier to understand) way. */
2352 if (((const struct expr_hash_elt *)p1)->hash
2353 != ((const struct expr_hash_elt *)p2)->hash)
2354 return false;
2356 /* In case of a collision, both right-hand sides have to be identical and have the
2357 same VUSE operands. */
2358 if (hashable_expr_equal_p (expr1, expr2)
2359 && types_compatible_p (expr1->type, expr2->type))
2361 /* Note that STMT1 and/or STMT2 may be NULL. */
2362 return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
2363 == (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
2366 return false;
2369 /* PHI-ONLY copy and constant propagation. This pass is meant to clean
2370 up degenerate PHIs created by or exposed by jump threading. */
2372 /* Given PHI, return its RHS if the PHI is a degenerate; otherwise return
2373 NULL. */
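/* An illustrative example (hypothetical names):

x_4 = PHI <x_2, x_2, x_4>

is a degenerate with value x_2, since arguments equal to the result
are ignored, whereas "x_4 = PHI <x_2, x_3>" is not a degenerate.  */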
2375 tree
2376 degenerate_phi_result (gimple phi)
2378 tree lhs = gimple_phi_result (phi);
2379 tree val = NULL;
2380 size_t i;
2382 /* Ignoring arguments which are the same as LHS, if all the remaining
2383 arguments are the same, then the PHI is a degenerate and has the
2384 value of that common argument. */
2385 for (i = 0; i < gimple_phi_num_args (phi); i++)
2387 tree arg = gimple_phi_arg_def (phi, i);
2389 if (arg == lhs)
2390 continue;
2391 else if (!arg)
2392 break;
2393 else if (!val)
2394 val = arg;
2395 else if (arg == val)
2396 continue;
2397 /* We open-code parts of operand_equal_p, not only to speed things
2398 up, but also to avoid crashing when dereferencing the type of
2399 a released SSA name. */
2400 else if (TREE_CODE (val) != TREE_CODE (arg)
2401 || TREE_CODE (val) == SSA_NAME
2402 || !operand_equal_p (arg, val, 0))
2403 break;
2405 return (i == gimple_phi_num_args (phi) ? val : NULL);
2408 /* Given a statement STMT, which is either a PHI node or an assignment,
2409 remove it from the IL. */
2411 static void
2412 remove_stmt_or_phi (gimple stmt)
2414 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2416 if (gimple_code (stmt) == GIMPLE_PHI)
2417 remove_phi_node (&gsi, true);
2418 else
2420 gsi_remove (&gsi, true);
2421 release_defs (stmt);
2425 /* Given a statement STMT, which is either a PHI node or an assignment,
2426 return the "rhs" of the node; in the case of a non-degenerate
2427 PHI, NULL is returned. */
2429 static tree
2430 get_rhs_or_phi_arg (gimple stmt)
2432 if (gimple_code (stmt) == GIMPLE_PHI)
2433 return degenerate_phi_result (stmt);
2434 else if (gimple_assign_single_p (stmt))
2435 return gimple_assign_rhs1 (stmt);
2436 else
2437 gcc_unreachable ();
2441 /* Given a statement STMT, which is either a PHI node or an assignment,
2442 return the "lhs" of the node. */
2444 static tree
2445 get_lhs_or_phi_result (gimple stmt)
2447 if (gimple_code (stmt) == GIMPLE_PHI)
2448 return gimple_phi_result (stmt);
2449 else if (is_gimple_assign (stmt))
2450 return gimple_assign_lhs (stmt);
2451 else
2452 gcc_unreachable ();
2455 /* Propagate RHS into all uses of LHS (when possible).
2457 RHS and LHS are derived from STMT, which is passed in solely so
2458 that we can remove it if propagation is successful.
2460 When propagating into a PHI node or into a statement which turns
2461 into a trivial copy or constant initialization, set the
2462 appropriate bit in INTERESTING_NAMEs so that we will visit those
2463 nodes as well in an effort to pick up secondary optimization
2464 opportunities. */
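/* An illustrative example (hypothetical names): propagating the
constant from "x_1 = 0" into "if (x_1 != 0)" lets the condition fold
to a known value; the untaken outgoing edge is removed, and PHI
nodes at its destination are marked in INTERESTING_NAMES since they
may now be degenerate.  */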
2466 static void
2467 propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
2469 /* First verify that propagation is valid and isn't going to move a
2470 loop variant variable outside its loop. */
2471 if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
2472 && (TREE_CODE (rhs) != SSA_NAME
2473 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2474 && may_propagate_copy (lhs, rhs)
2475 && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
2477 use_operand_p use_p;
2478 imm_use_iterator iter;
2479 gimple use_stmt;
2480 bool all = true;
2482 /* Dump details. */
2483 if (dump_file && (dump_flags & TDF_DETAILS))
2485 fprintf (dump_file, " Replacing '");
2486 print_generic_expr (dump_file, lhs, dump_flags);
2487 fprintf (dump_file, "' with %s '",
2488 (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
2489 print_generic_expr (dump_file, rhs, dump_flags);
2490 fprintf (dump_file, "'\n");
2493 /* Walk over every use of LHS and try to replace the use with RHS.
2494 At this point the only reason why such a propagation would not
2495 be successful would be if the use occurs in an ASM_EXPR. */
2496 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2498 /* Leave debug stmts alone. If we succeed in propagating
2499 all non-debug uses, we'll drop the DEF, and propagation
2500 into debug stmts will occur then. */
2501 if (gimple_debug_bind_p (use_stmt))
2502 continue;
2504 /* It's not always safe to propagate into an ASM_EXPR. */
2505 if (gimple_code (use_stmt) == GIMPLE_ASM
2506 && ! may_propagate_copy_into_asm (lhs))
2508 all = false;
2509 continue;
2512 /* Dump details. */
2513 if (dump_file && (dump_flags & TDF_DETAILS))
2515 fprintf (dump_file, " Original statement:");
2516 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2519 /* Propagate the RHS into this use of the LHS. */
2520 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2521 propagate_value (use_p, rhs);
2523 /* Special cases to avoid useless calls into the folding
2524 routines, operand scanning, etc.
2526 First, propagation into a PHI may cause the PHI to become
2527 a degenerate, so mark the PHI as interesting. No other
2528 actions are necessary.
2530 Second, if we're propagating a virtual operand and the
2531 propagation does not change the underlying _DECL node for
2532 the virtual operand, then no further actions are necessary. */
2533 if (gimple_code (use_stmt) == GIMPLE_PHI
2534 || (! is_gimple_reg (lhs)
2535 && TREE_CODE (rhs) == SSA_NAME
2536 && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
2538 /* Dump details. */
2539 if (dump_file && (dump_flags & TDF_DETAILS))
2541 fprintf (dump_file, " Updated statement:");
2542 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2545 /* Propagation into a PHI may expose new degenerate PHIs,
2546 so mark the result of the PHI as interesting. */
2547 if (gimple_code (use_stmt) == GIMPLE_PHI)
2549 tree result = get_lhs_or_phi_result (use_stmt);
2550 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2553 continue;
2556 /* From this point onward we are propagating into a
2557 real statement. Folding may (or may not) be possible,
2558 we may expose new operands, expose dead EH edges,
2559 etc. */
2560 /* NOTE tuples. In the tuples world, fold_stmt_inplace
2561 cannot fold a call that simplifies to a constant,
2562 because the GIMPLE_CALL must be replaced by a
2563 GIMPLE_ASSIGN, and there is no way to effect such a
2564 transformation in-place. We might want to consider
2565 using the more general fold_stmt here. */
2566 fold_stmt_inplace (use_stmt);
2568 /* Sometimes propagation can expose new operands to the
2569 renamer. */
2570 update_stmt (use_stmt);
2572 /* Dump details. */
2573 if (dump_file && (dump_flags & TDF_DETAILS))
2575 fprintf (dump_file, " Updated statement:");
2576 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2579 /* If we replaced a variable index with a constant, then
2580 we would need to update the invariant flag for ADDR_EXPRs. */
2581 if (gimple_assign_single_p (use_stmt)
2582 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
2583 recompute_tree_invariant_for_addr_expr
2584 (gimple_assign_rhs1 (use_stmt));
2586 /* If we cleaned up EH information from the statement,
2587 mark its containing block as needing EH cleanups. */
2588 if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
2590 bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
2591 if (dump_file && (dump_flags & TDF_DETAILS))
2592 fprintf (dump_file, " Flagged to clear EH edges.\n");
2595 /* Propagation may expose new trivial copy/constant propagation
2596 opportunities. */
2597 if (gimple_assign_single_p (use_stmt)
2598 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
2599 && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
2600 || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
2602 tree result = get_lhs_or_phi_result (use_stmt);
2603 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2606 /* Propagation into these nodes may make certain edges in
2607 the CFG unexecutable. We want to identify them, as PHI nodes
2608 at the destination of those unexecutable edges may become
2609 degenerates. */
2610 else if (gimple_code (use_stmt) == GIMPLE_COND
2611 || gimple_code (use_stmt) == GIMPLE_SWITCH
2612 || gimple_code (use_stmt) == GIMPLE_GOTO)
2614 tree val;
2616 if (gimple_code (use_stmt) == GIMPLE_COND)
2617 val = fold_binary_loc (gimple_location (use_stmt),
2618 gimple_cond_code (use_stmt),
2619 boolean_type_node,
2620 gimple_cond_lhs (use_stmt),
2621 gimple_cond_rhs (use_stmt));
2622 else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
2623 val = gimple_switch_index (use_stmt);
2624 else
2625 val = gimple_goto_dest (use_stmt);
2627 if (val && is_gimple_min_invariant (val))
2629 basic_block bb = gimple_bb (use_stmt);
2630 edge te = find_taken_edge (bb, val);
2631 edge_iterator ei;
2632 edge e;
2633 gimple_stmt_iterator gsi, psi;
2635 /* Remove all outgoing edges except TE. */
2636 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
2638 if (e != te)
2640 /* Mark all the PHI nodes at the destination of
2641 the unexecutable edge as interesting. */
2642 for (psi = gsi_start_phis (e->dest);
2643 !gsi_end_p (psi);
2644 gsi_next (&psi))
2646 gimple phi = gsi_stmt (psi);
2648 tree result = gimple_phi_result (phi);
2649 int version = SSA_NAME_VERSION (result);
2651 bitmap_set_bit (interesting_names, version);
2654 te->probability += e->probability;
2656 te->count += e->count;
2657 remove_edge (e);
2658 cfg_altered = true;
2660 else
2661 ei_next (&ei);
2664 gsi = gsi_last_bb (gimple_bb (use_stmt));
2665 gsi_remove (&gsi, true);
2667 /* And fixup the flags on the single remaining edge. */
2668 te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
2669 te->flags &= ~EDGE_ABNORMAL;
2670 te->flags |= EDGE_FALLTHRU;
2671 if (te->probability > REG_BR_PROB_BASE)
2672 te->probability = REG_BR_PROB_BASE;
2677 /* Ensure there is nothing else to do. */
2678 gcc_assert (!all || has_zero_uses (lhs));
2680 /* If we were able to propagate away all uses of LHS, then
2681 we can remove STMT. */
2682 if (all)
2683 remove_stmt_or_phi (stmt);
2687 /* STMT is either a PHI node (potentially a degenerate PHI node) or
2688 a statement that is a trivial copy or constant initialization.
2690 Attempt to eliminate STMT by propagating its RHS into all uses of
2691 its LHS. This may in turn set new bits in INTERESTING_NAMES
2692 for nodes we want to revisit later.
2694 All exit paths should clear INTERESTING_NAMES for the result
2695 of STMT. */
2697 static void
2698 eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
2700 tree lhs = get_lhs_or_phi_result (stmt);
2701 tree rhs;
2702 int version = SSA_NAME_VERSION (lhs);
2704 /* If the LHS of this statement or PHI has no uses, then we can
2705 just eliminate it. This can occur if, for example, the PHI
2706 was created by block duplication due to threading and its only
2707 use was in the conditional at the end of the block which was
2708 deleted. */
2709 if (has_zero_uses (lhs))
2711 bitmap_clear_bit (interesting_names, version);
2712 remove_stmt_or_phi (stmt);
2713 return;
2716 /* Get the RHS of the assignment or PHI node if the PHI is a
2717 degenerate. */
2718 rhs = get_rhs_or_phi_arg (stmt);
2719 if (!rhs)
2721 bitmap_clear_bit (interesting_names, version);
2722 return;
2725 propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
2727 /* Note that STMT may well have been deleted by now, so do
2728 not access it; instead use the saved version number to clear
2729 STMT's entry in the worklist. */
2730 bitmap_clear_bit (interesting_names, version);
2733 /* The first phase in degenerate PHI elimination.
2735 Eliminate the degenerate PHIs in BB, then recurse on the
2736 dominator children of BB. */
2738 static void
2739 eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
2741 gimple_stmt_iterator gsi;
2742 basic_block son;
2744 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2746 gimple phi = gsi_stmt (gsi);
2748 eliminate_const_or_copy (phi, interesting_names);
2751 /* Recurse into the dominator children of BB. */
2752 for (son = first_dom_son (CDI_DOMINATORS, bb);
2753 son;
2754 son = next_dom_son (CDI_DOMINATORS, son))
2755 eliminate_degenerate_phis_1 (son, interesting_names);
2759 /* A very simple pass to eliminate degenerate PHI nodes from the
2760 IL. This is meant to be fast enough to be run several
2761 times in the optimization pipeline.
2763 Certain optimizations, particularly those which duplicate blocks
2764 or remove edges from the CFG, can create or expose PHIs which are
2765 trivial copies or constant initializations.
2767 While we could pick up these optimizations in DOM or with the
2768 combination of copy-prop and CCP, those solutions are far too
2769 heavy-weight for our needs.
2771 This implementation has two phases so that we can efficiently
2772 eliminate the first order degenerate PHIs and second order
2773 degenerate PHIs.
2775 The first phase performs a dominator walk to identify and eliminate
2776 the vast majority of the degenerate PHIs. When a degenerate PHI
2777 is identified and eliminated, any affected statements or PHIs
2778 are put on a worklist.
2780 The second phase eliminates degenerate PHIs and trivial copies
2781 or constant initializations using the worklist. This is how we
2782 pick up the secondary optimization opportunities with minimal
2783 cost. */
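/* An illustrative second order case (hypothetical names): the
dominator walk eliminates "x_2 = PHI <x_1, x_1>", rewriting a use
"y_3 = PHI <x_2, x_1>" into "y_3 = PHI <x_1, x_1>"; y_3 is queued in
INTERESTING_NAMES and is eliminated by the worklist phase.  */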
2785 static unsigned int
2786 eliminate_degenerate_phis (void)
2788 bitmap interesting_names;
2789 bitmap interesting_names1;
2791 /* Bitmap of blocks which need EH information updated. We cannot
2792 update it on-the-fly as doing so invalidates the dominator tree. */
2793 need_eh_cleanup = BITMAP_ALLOC (NULL);
2795 /* INTERESTING_NAMES is effectively our worklist, indexed by
2796 SSA_NAME_VERSION.
2798 A set bit indicates that the statement or PHI node which
2799 defines the SSA_NAME should be (re)examined to determine if
2800 it has become a degenerate PHI or trivial const/copy propagation
2801 opportunity.
2803 Experiments have shown we generally get better compilation
2804 time behavior with bitmaps rather than sbitmaps. */
2805 interesting_names = BITMAP_ALLOC (NULL);
2806 interesting_names1 = BITMAP_ALLOC (NULL);
2808 calculate_dominance_info (CDI_DOMINATORS);
2809 cfg_altered = false;
2811 /* First phase. Eliminate degenerate PHIs via a dominator
2812 walk of the CFG.
2814 Experiments have indicated that we generally get better
2815 compile-time behavior by visiting blocks in the first
2816 phase in dominator order. Presumably this is because walking
2817 in dominator order leaves fewer PHIs for later examination
2818 by the worklist phase. */
2819 eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
2821 /* Second phase. Eliminate second order degenerate PHIs as well
2822 as trivial copies or constant initializations identified by
2823 the first phase or this phase. Basically we keep iterating
2824 until our set of INTERESTING_NAMEs is empty. */
2825 while (!bitmap_empty_p (interesting_names))
2827 unsigned int i;
2828 bitmap_iterator bi;
2830 /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
2831 changed during the loop. Copy it to another bitmap and
2832 use that. */
2833 bitmap_copy (interesting_names1, interesting_names);
2835 EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
2837 tree name = ssa_name (i);
2839 /* Ignore SSA_NAMEs that have been released because
2840 their defining statement was deleted (unreachable). */
2841 if (name)
2842 eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
2843 interesting_names);
2847 if (cfg_altered)
2848 free_dominance_info (CDI_DOMINATORS);
2850 /* Propagation of const and copies may make some EH edges dead. Purge
2851 such edges from the CFG as needed. */
2852 if (!bitmap_empty_p (need_eh_cleanup))
2854 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
2855 BITMAP_FREE (need_eh_cleanup);
2858 BITMAP_FREE (interesting_names);
2859 BITMAP_FREE (interesting_names1);
2860 return 0;
2863 struct gimple_opt_pass pass_phi_only_cprop =
2866 GIMPLE_PASS,
2867 "phicprop", /* name */
2868 gate_dominator, /* gate */
2869 eliminate_degenerate_phis, /* execute */
2870 NULL, /* sub */
2871 NULL, /* next */
2872 0, /* static_pass_number */
2873 TV_TREE_PHI_CPROP, /* tv_id */
2874 PROP_cfg | PROP_ssa, /* properties_required */
2875 0, /* properties_provided */
2876 0, /* properties_destroyed */
2877 0, /* todo_flags_start */
2878 TODO_cleanup_cfg
2879 | TODO_dump_func
2880 | TODO_ggc_collect
2881 | TODO_verify_ssa
2882 | TODO_verify_stmts
2883 | TODO_update_ssa /* todo_flags_finish */