/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"

/* This file implements optimizations on the dominator tree.  */
/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */

enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_TERNARY,
  EXPR_CALL
};

struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0, opnd1; } binary;
    struct { enum tree_code op; tree opnd0, opnd1, opnd2; } ternary;
    struct { gimple fn_from; bool pure; size_t nargs; tree *args; } call;
  } ops;
};
/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

typedef struct cond_equivalence_s
{
  struct hashable_expr cond;
  tree value;
} cond_equivalence;

DEF_VEC_O(cond_equivalence);
DEF_VEC_ALLOC_O(cond_equivalence,heap);
/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  */
  VEC(cond_equivalence, heap) *cond_equivalences;
};
/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
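
/* For example (illustrative): after recording a_1 = b_2 + c_3, a later
   statement d_4 = b_2 + c_3 dominated by it can be rewritten as
   d_4 = a_1.  */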
static htab_t avail_exprs;

/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;
DEF_VEC_P(expr_hash_elt_t);
DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);

static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;
/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};
/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;
/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_leave_block (struct dom_walk_data *, basic_block);
static void dom_opt_enter_block (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = stmt;

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = (tree *) xcalloc (nargs, sizeof (tree));
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}
/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */
static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */

static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_TERNARY:
      val = iterative_hash_object (expr->ops.ternary.op, val);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        val = iterative_hash_exprs_commutative (expr->ops.ternary.opnd0,
                                                expr->ops.ternary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.ternary.opnd0, val);
          val = iterative_hash_expr (expr->ops.ternary.opnd1, val);
        }
      val = iterative_hash_expr (expr->ops.ternary.opnd2, val);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gimple fn_from;

        val = iterative_hash_object (code, val);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          val = iterative_hash_hashval_t
            ((hashval_t) gimple_call_internal_fn (fn_from), val);
        else
          val = iterative_hash_expr (gimple_call_fn (fn_from), val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}
/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", tree_code_name[element->expr.ops.ternary.op]);
      print_generic_expr (stream, element->expr.ops.ternary.opnd0, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd1, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd2, 0);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;
        gimple fn_from;

        fn_from = element->expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
                 stream);
        else
          print_generic_expr (stream, gimple_call_fn (fn_from), 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, "          ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}
/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);

  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);

  free (element);
}
/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}
/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              if (edge_info->cond_equivalences)
                VEC_free (cond_equivalence, heap, edge_info->cond_equivalences);
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}
/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free_expr_hash_elt);
  avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dom_opt_enter_block;
  walk_data.after_dom_children = dom_opt_leave_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (bb
              && single_succ_p (bb)
              && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            {
              bitmap_clear_bit (need_eh_cleanup, i);
              bitmap_set_bit (need_eh_cleanup, single_succ (bb)->index);
            }
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);

  /* Free the value-handle array.  */
  threadedge_finalize_values ();
  ssa_name_values = NULL;

  return 0;
}
static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}

struct gimple_opt_pass pass_dominator =
{
 {
  GIMPLE_PASS,
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg
    | TODO_update_ssa
    | TODO_verify_ssa
    | TODO_verify_flow                  /* todo_flags_finish */
 }
};
/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */
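
/* For example (illustrative): the condition 5 < a_1 would be swapped
   into the canonical form a_1 > 5, so that equivalent conditions meet
   in the expression hash table.  */
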
static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}
/* Remove all the expressions made available in this block from the
   AVAIL_EXPRS table, stopping when we hit the NULL marker that was
   pushed when the block was entered.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
    {
      expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);
      void **slot;

      if (victim == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, victim);
        }

      slot = htab_find_slot_with_hash (avail_exprs,
                                       victim, victim->hash, NO_INSERT);
      gcc_assert (slot && *slot == (void *) victim);
      htab_clear_slot (avail_exprs, slot);
    }
}
/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = VEC_pop (tree, const_and_copies_stack);
      set_ssa_name_value (dest, prev_value);
    }
}
/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}
/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

static void
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  if (! walk_data->global_data)
    {
      gimple dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);
      walk_data->global_data = dummy_cond;
    }

  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
}
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */
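
/* For example (illustrative): for x_3 = PHI <y_2, y_2, x_3>, every
   alternative other than the result itself is y_2, so x_3 must be
   equal to y_2.  */
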
static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}
/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}
/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */
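
/* For example (illustrative): if the parent block ends with
   if (a_1 == 0) and BB is reached only over the true edge, that edge
   carries the equivalence a_1 = 0, which is entered into our tables
   here.  */
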
static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          cond_equivalence *eq;

          if (lhs)
            record_equality (lhs, rhs);

          for (i = 0; VEC_iterate (cond_equivalence,
                                   edge_info->cond_equivalences, i, eq); ++i)
            record_cond (eq);
        }
    }
}
/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements:                   %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, "    avail_exprs: ");
  htab_statistics (file, avail_exprs);
}


/* Dump SSA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}


/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}
/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
    }
  else
    free (element);
}
/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to **P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           VEC(cond_equivalence, heap) **p)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = boolean_true_node;
  VEC_safe_push (cond_equivalence, heap, *p, &c);
}
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */
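
/* Similarly (illustrative): a < b also implies a != b, and for
   floating-point operands it implies ORDERED (a, b); the cases below
   record each such dominated condition.  */
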
static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    default:
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  VEC_safe_push (cond_equivalence, heap, edge_info->cond_equivalences, &c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  VEC_safe_push (cond_equivalence, heap, edge_info->cond_equivalences, &c);
}
/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  set_ssa_name_value (x, y);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}
/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

static int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}
/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
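  /* For instance (illustrative): given x_1 == 5.0, we record x_1 = 5.0
     rather than the reverse, since the invariant is the preferred
     canonical value.  */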
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

bool
simple_iv_increment_p (gimple stmt)
{
  enum tree_code code;
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  code = gimple_assign_rhs_code (stmt);
  if (code != PLUS_EXPR
      && code != MINUS_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  preinc = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */
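
/* For example (illustrative): if CONST_AND_COPIES records x_3 = 5, a
   PHI argument x_3 on an outgoing edge of BB becomes 5 in the
   successor's PHI node.  */
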
static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }
    }
}
/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert_loc (loc, TREE_TYPE (index),
                                                 CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
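          /* For example (illustrative): given if (b_1 == 0), b_1 is
             boolean_false_node on the true edge and boolean_true_node
             on the false edge.  */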
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              bool can_infer_simple_equiv
                = !(HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
                    && real_zerop (op0));
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (can_infer_simple_equiv && code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }

          else if (TREE_CODE (op0) == SSA_NAME
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              bool can_infer_simple_equiv
                = !(HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op1)))
                    && (TREE_CODE (op1) == SSA_NAME || real_zerop (op1)));
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (can_infer_simple_equiv && code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }
        }

      /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
    }
}
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

static void
dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                     basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
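  /* For example (illustrative): if traversing the edge out of BB
     establishes a_1 = 0 and the successor block tests if (a_1 == 0),
     we can statically determine that its true edge will be taken and
     thread the jump.  */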
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = (struct edge_info *) true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              cond_equivalence *eq;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              for (i = 0; VEC_iterate (cond_equivalence,
                                       edge_info->cond_equivalences, i, eq); ++i)
                record_cond (eq);
            }

          dom_thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
          restore_vars_to_original_value ();
        }

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
          edge_info = (struct edge_info *) false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              cond_equivalence *eq;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              for (i = 0; VEC_iterate (cond_equivalence,
                                       edge_info->cond_equivalences, i, eq); ++i)
                record_cond (eq);
            }

          /* Now thread the edge.  */
          dom_thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();
}
/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.  */

static void
eliminate_redundant_computations (gimple_stmt_iterator* gsi)
{
  tree expr_type;
  tree cached_lhs;
  bool insert = true;
  bool assigns_var_p = false;

  gimple stmt = gsi_stmt (*gsi);

  tree def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but cannot
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || gimple_vdef (stmt)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    expr_type = TREE_TYPE (gimple_switch_index (stmt));
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
    {
      gcc_checking_assert (TREE_CODE (cached_lhs) == SSA_NAME
                           || is_gimple_min_invariant (cached_lhs));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced redundant expr '");
          print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

      if (assigns_var_p
          && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
    }
}
/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.  We handle only very simple
   copy equivalences here; the heavy lifting is done by
   eliminate_redundant_computations.  */
1909 static void
1910 record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
1912 tree lhs;
1913 enum tree_code lhs_code;
1915 gcc_assert (is_gimple_assign (stmt));
1917 lhs = gimple_assign_lhs (stmt);
1918 lhs_code = TREE_CODE (lhs);
1920 if (lhs_code == SSA_NAME
1921 && gimple_assign_single_p (stmt))
1923 tree rhs = gimple_assign_rhs1 (stmt);
1925 /* If the RHS of the assignment is a constant or another variable that
1926 may be propagated, register it in the CONST_AND_COPIES table. We
1927 do not need to record unwind data for this, since this is a true
1928 assignment and not an equivalence inferred from a comparison. All
1929 uses of this ssa name are dominated by this assignment, so unwinding
1930 just costs time and space. */
1931 if (may_optimize_p
1932 && (TREE_CODE (rhs) == SSA_NAME
1933 || is_gimple_min_invariant (rhs)))
1935 if (dump_file && (dump_flags & TDF_DETAILS))
1937 fprintf (dump_file, "==== ASGN ");
1938 print_generic_expr (dump_file, lhs, 0);
1939 fprintf (dump_file, " = ");
1940 print_generic_expr (dump_file, rhs, 0);
1941 fprintf (dump_file, "\n");
1944 set_ssa_name_value (lhs, rhs);
1948 /* A memory store, even an aliased store, creates a useful
1949 equivalence. By exchanging the LHS and RHS, creating suitable
1950 vops and recording the result in the available expression table,
1951 we may be able to expose more redundant loads. */
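/* E.g. after the store *p_1 = x_2; we enter the artificial statement
   x_2 = *p_1; into the table, so a later load y_3 = *p_1; with the
   same VUSE may be replaced by y_3 = x_2; (illustrative names). */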
1952 if (!gimple_has_volatile_ops (stmt)
1953 && gimple_references_memory_p (stmt)
1954 && gimple_assign_single_p (stmt)
1955 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1956 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
1957 && !is_gimple_reg (lhs))
1959 tree rhs = gimple_assign_rhs1 (stmt);
1960 gimple new_stmt;
1962 /* Build a new statement with the RHS and LHS exchanged. */
1963 if (TREE_CODE (rhs) == SSA_NAME)
1965 /* NOTE tuples. The call to gimple_build_assign below replaced
1966 a call to build_gimple_modify_stmt, which did not set the
1967 SSA_NAME_DEF_STMT on the LHS of the assignment. Doing so
1968 may cause an SSA validation failure, as the LHS may be a
1969 default-initialized name and should have no definition. I'm
1970 a bit dubious of this, as the artificial statement that we
1971 generate here may in fact be ill-formed, but it is simply
1972 used as an internal device in this pass, and never becomes
1973 part of the CFG. */
1974 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
1975 new_stmt = gimple_build_assign (rhs, lhs);
1976 SSA_NAME_DEF_STMT (rhs) = defstmt;
1978 else
1979 new_stmt = gimple_build_assign (rhs, lhs);
1981 gimple_set_vuse (new_stmt, gimple_vdef (stmt));
1983 /* Finally enter the statement into the available expression
1984 table. */
1985 lookup_avail_expr (new_stmt, true);
1989 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
1990 CONST_AND_COPIES. */
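/* E.g. if CONST_AND_COPIES records that x_1 currently has the value 5,
   a use of x_1 in STMT is rewritten to 5, subject to the safety checks
   below (an illustrative sketch). */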
1992 static void
1993 cprop_operand (gimple stmt, use_operand_p op_p)
1995 tree val;
1996 tree op = USE_FROM_PTR (op_p);
1998 /* If the operand has a known constant value or it is known to be a
1999 copy of some other variable, use the value or copy stored in
2000 CONST_AND_COPIES. */
2001 val = SSA_NAME_VALUE (op);
2002 if (val && val != op)
2004 /* Do not replace hard register operands in asm statements. */
2005 if (gimple_code (stmt) == GIMPLE_ASM
2006 && !may_propagate_copy_into_asm (op))
2007 return;
2009 /* Certain operands are not allowed to be copy propagated due
2010 to their interaction with exception handling and some GCC
2011 extensions. */
2012 if (!may_propagate_copy (op, val))
2013 return;
2015 /* Do not propagate addresses that point to volatiles into memory
2016 stmts without volatile operands. */
2017 if (POINTER_TYPE_P (TREE_TYPE (val))
2018 && TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (val)))
2019 && gimple_has_mem_ops (stmt)
2020 && !gimple_has_volatile_ops (stmt))
2021 return;
2023 /* Do not propagate copies if the propagated value is at a deeper loop
2024 depth than the propagatee. Otherwise, this may move loop variant
2025 variables outside of their loops and prevent coalescing
2026 opportunities. If the value was loop invariant, it will be hoisted
2027 by LICM and exposed for copy propagation. */
2028 if (loop_depth_of_name (val) > loop_depth_of_name (op))
2029 return;
2031 /* Do not propagate copies into simple IV increment statements.
2032 See PR23821 for how this can disturb IV analysis. */
2033 if (TREE_CODE (val) != INTEGER_CST
2034 && simple_iv_increment_p (stmt))
2035 return;
2037 /* Dump details. */
2038 if (dump_file && (dump_flags & TDF_DETAILS))
2040 fprintf (dump_file, " Replaced '");
2041 print_generic_expr (dump_file, op, dump_flags);
2042 fprintf (dump_file, "' with %s '",
2043 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2044 print_generic_expr (dump_file, val, dump_flags);
2045 fprintf (dump_file, "'\n");
2048 if (TREE_CODE (val) != SSA_NAME)
2049 opt_stats.num_const_prop++;
2050 else
2051 opt_stats.num_copy_prop++;
2053 propagate_value (op_p, val);
2055 /* And note that we modified this statement. This is now
2056 safe, even if we changed virtual operands since we will
2057 rescan the statement and rewrite its operands again. */
2058 gimple_set_modified (stmt, true);
2062 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2063 known value for that SSA_NAME (or NULL if no value is known).
2065 Propagate values from CONST_AND_COPIES into the real uses
2066 of STMT. */
2068 static void
2069 cprop_into_stmt (gimple stmt)
2071 use_operand_p op_p;
2072 ssa_op_iter iter;
2074 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_USE)
2075 cprop_operand (stmt, op_p);
2078 /* Optimize the statement pointed to by iterator SI.
2080 We try to perform some simplistic global redundancy elimination and
2081 constant propagation:
2083 1- To detect global redundancy, we keep track of expressions that have
2084 been computed in this block and its dominators. If we find that the
2085 same expression is computed more than once, we eliminate repeated
2086 computations by using the target of the first one.
2088 2- Constant values and copy assignments. This is used to do very
2089 simplistic constant and copy propagation. When a constant or copy
2090 assignment is found, we map the value on the RHS of the assignment to
2091 the variable in the LHS in the CONST_AND_COPIES table. */
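/* A short illustrative sequence (made-up names, a sketch):

     x_1 = 4;
     y_2 = x_1 + 5;   becomes, after propagation and folding, y_2 = 9;
     if (y_2 > 8)     becomes if (1) and marks the CFG as altered. */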
2093 static void
2094 optimize_stmt (basic_block bb, gimple_stmt_iterator si)
2096 gimple stmt, old_stmt;
2097 bool may_optimize_p;
2098 bool modified_p = false;
2100 old_stmt = stmt = gsi_stmt (si);
2102 if (dump_file && (dump_flags & TDF_DETAILS))
2104 fprintf (dump_file, "Optimizing statement ");
2105 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2108 if (gimple_code (stmt) == GIMPLE_COND)
2109 canonicalize_comparison (stmt);
2111 update_stmt_if_modified (stmt);
2112 opt_stats.num_stmts++;
2114 /* Const/copy propagate into the uses of STMT. */
2115 cprop_into_stmt (stmt);
2117 /* If the statement has been modified with constant replacements,
2118 fold its RHS before checking for redundant computations. */
2119 if (gimple_modified_p (stmt))
2121 tree rhs = NULL;
2123 /* Try to fold the statement making sure that STMT is kept
2124 up to date. */
2125 if (fold_stmt (&si))
2127 stmt = gsi_stmt (si);
2128 gimple_set_modified (stmt, true);
2130 if (dump_file && (dump_flags & TDF_DETAILS))
2132 fprintf (dump_file, " Folded to: ");
2133 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2137 /* We only need to consider cases that can yield a gimple operand. */
2138 if (gimple_assign_single_p (stmt))
2139 rhs = gimple_assign_rhs1 (stmt);
2140 else if (gimple_code (stmt) == GIMPLE_GOTO)
2141 rhs = gimple_goto_dest (stmt);
2142 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2143 /* This should never be an ADDR_EXPR. */
2144 rhs = gimple_switch_index (stmt);
2146 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2147 recompute_tree_invariant_for_addr_expr (rhs);
2149 /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
2150 even if fold_stmt updated the stmt already and thus cleared
2151 gimple_modified_p flag on it. */
2152 modified_p = true;
2155 /* Check for redundant computations. Do this optimization only for
2156 assignments, calls, conditionals and switches without volatile ops or side effects. */
2157 may_optimize_p = (!gimple_has_volatile_ops (stmt)
2158 && ((is_gimple_assign (stmt)
2159 && !gimple_rhs_has_side_effects (stmt))
2160 || (is_gimple_call (stmt)
2161 && gimple_call_lhs (stmt) != NULL_TREE
2162 && !gimple_rhs_has_side_effects (stmt))
2163 || gimple_code (stmt) == GIMPLE_COND
2164 || gimple_code (stmt) == GIMPLE_SWITCH));
2166 if (may_optimize_p)
2168 if (gimple_code (stmt) == GIMPLE_CALL)
2170 /* Resolve __builtin_constant_p. If it hasn't been
2171 folded to integer_one_node by now, it's fairly
2172 certain that the value simply isn't constant. */
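/* E.g. a surviving t_5 = __builtin_constant_p (n_4); is resolved
   to t_5 = 0; at this point (an illustrative sketch). */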
2173 tree callee = gimple_call_fndecl (stmt);
2174 if (callee
2175 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2176 && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
2178 propagate_tree_value_into_stmt (&si, integer_zero_node);
2179 stmt = gsi_stmt (si);
2183 update_stmt_if_modified (stmt);
2184 eliminate_redundant_computations (&si);
2185 stmt = gsi_stmt (si);
2187 /* Perform simple redundant store elimination. */
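/* E.g. if x_2 = *p_1; is available, a following store *p_1 = x_2;
   merely writes back the value just loaded and can be removed
   (an illustrative sketch). */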
2188 if (gimple_assign_single_p (stmt)
2189 && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
2191 tree lhs = gimple_assign_lhs (stmt);
2192 tree rhs = gimple_assign_rhs1 (stmt);
2193 tree cached_lhs;
2194 gimple new_stmt;
2195 if (TREE_CODE (rhs) == SSA_NAME)
2197 tree tem = SSA_NAME_VALUE (rhs);
2198 if (tem)
2199 rhs = tem;
2201 /* Build a new statement with the RHS and LHS exchanged. */
2202 if (TREE_CODE (rhs) == SSA_NAME)
2204 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
2205 new_stmt = gimple_build_assign (rhs, lhs);
2206 SSA_NAME_DEF_STMT (rhs) = defstmt;
2208 else
2209 new_stmt = gimple_build_assign (rhs, lhs);
2210 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2211 cached_lhs = lookup_avail_expr (new_stmt, false);
2212 if (cached_lhs
2213 && rhs == cached_lhs)
2215 basic_block bb = gimple_bb (stmt);
2216 int lp_nr = lookup_stmt_eh_lp (stmt);
2217 unlink_stmt_vdef (stmt);
2218 gsi_remove (&si, true);
2219 if (lp_nr != 0)
2221 bitmap_set_bit (need_eh_cleanup, bb->index);
2222 if (dump_file && (dump_flags & TDF_DETAILS))
2223 fprintf (dump_file, " Flagged to clear EH edges.\n");
2225 return;
2230 /* Record any additional equivalences created by this statement. */
2231 if (is_gimple_assign (stmt))
2232 record_equivalences_from_stmt (stmt, may_optimize_p);
2234 /* If STMT is a COND_EXPR and it was modified, then we may know
2235 where it goes. If that is the case, then mark the CFG as altered.
2237 This will cause us to later call remove_unreachable_blocks and
2238 cleanup_tree_cfg when it is safe to do so. It is not safe to
2239 clean things up here since removal of edges and such can trigger
2240 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2241 the manager.
2243 That's all fine and good, except that once SSA_NAMEs are released
2244 to the manager, we must not call create_ssa_name until all references
2245 to released SSA_NAMEs have been eliminated.
2247 All references to the deleted SSA_NAMEs cannot be eliminated until
2248 we remove unreachable blocks.
2250 We cannot remove unreachable blocks until after we have completed
2251 any queued jump threading.
2253 We cannot complete any queued jump threads until we have taken
2254 appropriate variables out of SSA form. Taking variables out of
2255 SSA form can call create_ssa_name and thus we lose.
2257 Ultimately I suspect we're going to need to change the interface
2258 into the SSA_NAME manager. */
2259 if (gimple_modified_p (stmt) || modified_p)
2261 tree val = NULL;
2263 update_stmt_if_modified (stmt);
2265 if (gimple_code (stmt) == GIMPLE_COND)
2266 val = fold_binary_loc (gimple_location (stmt),
2267 gimple_cond_code (stmt), boolean_type_node,
2268 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
2269 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2270 val = gimple_switch_index (stmt);
2272 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2273 cfg_altered = true;
2275 /* If we simplified a statement in such a way as to be shown that it
2276 cannot trap, update the eh information and the cfg to match. */
2277 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
2279 bitmap_set_bit (need_eh_cleanup, bb->index);
2280 if (dump_file && (dump_flags & TDF_DETAILS))
2281 fprintf (dump_file, " Flagged to clear EH edges.\n");
2286 /* Search for an existing instance of STMT in the AVAIL_EXPRS table.
2287 If found, return its LHS. Otherwise insert STMT in the table and
2288 return NULL_TREE.
2290 Also, when an expression is first inserted in the table, it is
2291 also added to AVAIL_EXPRS_STACK, so that it can be removed when
2292 we finish processing this block and its children. */
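/* A sketch of the intended protocol (hypothetical names):

     x_1 = a_2 + b_3;   lookup_avail_expr (stmt, true) -> NULL_TREE
     y_4 = a_2 + b_3;   lookup_avail_expr (stmt, true) -> x_1

   where the first call records the expression with LHS x_1. */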
2294 static tree
2295 lookup_avail_expr (gimple stmt, bool insert)
2297 void **slot;
2298 tree lhs;
2299 tree temp;
2300 struct expr_hash_elt element;
2302 /* Get LHS of assignment or call, else NULL_TREE. */
2303 lhs = gimple_get_lhs (stmt);
2305 initialize_hash_element (stmt, lhs, &element);
2307 if (dump_file && (dump_flags & TDF_DETAILS))
2309 fprintf (dump_file, "LKUP ");
2310 print_expr_hash_elt (dump_file, &element);
2313 /* Don't bother remembering constant assignments and copy operations.
2314 Constants and copy operations are handled by the constant/copy propagator
2315 in optimize_stmt. */
2316 if (element.expr.kind == EXPR_SINGLE
2317 && (TREE_CODE (element.expr.ops.single.rhs) == SSA_NAME
2318 || is_gimple_min_invariant (element.expr.ops.single.rhs)))
2319 return NULL_TREE;
2321 /* Finally try to find the expression in the main expression hash table. */
2322 slot = htab_find_slot_with_hash (avail_exprs, &element, element.hash,
2323 (insert ? INSERT : NO_INSERT));
2324 if (slot == NULL)
2325 return NULL_TREE;
2327 if (*slot == NULL)
2329 struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
2330 *element2 = element;
2331 element2->stamp = element2;
2332 *slot = (void *) element2;
2334 if (dump_file && (dump_flags & TDF_DETAILS))
2336 fprintf (dump_file, "2>>> ");
2337 print_expr_hash_elt (dump_file, element2);
2340 VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element2);
2341 return NULL_TREE;
2344 /* Extract the LHS of the assignment so that it can be used as the current
2345 definition of another variable. */
2346 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2348 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2349 use the value from the const_and_copies table. */
2350 if (TREE_CODE (lhs) == SSA_NAME)
2352 temp = SSA_NAME_VALUE (lhs);
2353 if (temp)
2354 lhs = temp;
2357 if (dump_file && (dump_flags & TDF_DETAILS))
2359 fprintf (dump_file, "FIND: ");
2360 print_generic_expr (dump_file, lhs, 0);
2361 fprintf (dump_file, "\n");
2364 return lhs;
2367 /* Hashing and equality functions for AVAIL_EXPRS. We compute a value number
2368 for expressions using the code of the expression and the SSA numbers of
2369 its operands. */
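/* Note that hashing the VUSE (below) keeps loads that are separated by
   a clobbering store from matching; e.g. two loads of *p_1 around a
   store to *p_1 carry different VUSEs (an illustrative note). */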
2371 static hashval_t
2372 avail_expr_hash (const void *p)
2374 gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
2375 const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
2376 tree vuse;
2377 hashval_t val = 0;
2379 val = iterative_hash_hashable_expr (expr, val);
2381 /* If the hash table entry is not associated with a statement, then we
2382 can just hash the expression and not worry about virtual operands
2383 and such. */
2384 if (!stmt)
2385 return val;
2387 /* Add the SSA version number of the vuse operand. This is important
2388 because compound variables like arrays are not renamed in the
2389 operands. Rather, the rename is done on the virtual variable
2390 representing all the elements of the array. */
2391 if ((vuse = gimple_vuse (stmt)))
2392 val = iterative_hash_expr (vuse, val);
2394 return val;
2397 static hashval_t
2398 real_avail_expr_hash (const void *p)
2400 return ((const struct expr_hash_elt *)p)->hash;
2403 static int
2404 avail_expr_eq (const void *p1, const void *p2)
2406 gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
2407 const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
2408 const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
2409 gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
2410 const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
2411 const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;
2413 /* This case should apply only when removing entries from the table. */
2414 if (stamp1 == stamp2)
2415 return true;
2417 /* FIXME tuples:
2418 We add stmts to a hash table and then modify them. To detect the case
2419 that we modify a stmt and then search for it, we assume that the hash
2420 is always modified by that change.
2421 We have to fully check why this doesn't happen on trunk or rewrite
2422 this in a more reliable (and easier to understand) way. */
2423 if (((const struct expr_hash_elt *)p1)->hash
2424 != ((const struct expr_hash_elt *)p2)->hash)
2425 return false;
2427 /* In case of a collision, both RHSs have to be identical and have the
2428 same VUSE operands. */
2429 if (hashable_expr_equal_p (expr1, expr2)
2430 && types_compatible_p (expr1->type, expr2->type))
2432 /* Note that STMT1 and/or STMT2 may be NULL. */
2433 return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
2434 == (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
2437 return false;
2440 /* PHI-ONLY copy and constant propagation. This pass is meant to clean
2441 up degenerate PHIs created by or exposed by jump threading. */
2443 /* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
2444 NULL. */
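/* E.g. x_3 = PHI <y_2(3), y_2(4), x_3(5)> is a degenerate with value
   y_2, whereas x_3 = PHI <y_2(3), z_7(4)> is not (made-up names). */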
2446 tree
2447 degenerate_phi_result (gimple phi)
2449 tree lhs = gimple_phi_result (phi);
2450 tree val = NULL;
2451 size_t i;
2453 /* Ignoring arguments which are the same as LHS, if all the remaining
2454 arguments are the same, then the PHI is a degenerate and has the
2455 value of that common argument. */
2456 for (i = 0; i < gimple_phi_num_args (phi); i++)
2458 tree arg = gimple_phi_arg_def (phi, i);
2460 if (arg == lhs)
2461 continue;
2462 else if (!arg)
2463 break;
2464 else if (!val)
2465 val = arg;
2466 else if (arg == val)
2467 continue;
2468 /* We open-code part of operand_equal_p here, not only to speed
2469 things up, but also to avoid crashing when dereferencing the type
2470 of a released SSA name. */
2471 else if (TREE_CODE (val) != TREE_CODE (arg)
2472 || TREE_CODE (val) == SSA_NAME
2473 || !operand_equal_p (arg, val, 0))
2474 break;
2476 return (i == gimple_phi_num_args (phi) ? val : NULL);
2479 /* Given a statement STMT, which is either a PHI node or an assignment,
2480 remove it from the IL. */
2482 static void
2483 remove_stmt_or_phi (gimple stmt)
2485 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2487 if (gimple_code (stmt) == GIMPLE_PHI)
2488 remove_phi_node (&gsi, true);
2489 else
2491 gsi_remove (&gsi, true);
2492 release_defs (stmt);
2496 /* Given a statement STMT, which is either a PHI node or an assignment,
2497 return the "rhs" of the node; in the case of a non-degenerate
2498 PHI, NULL is returned. */
2500 static tree
2501 get_rhs_or_phi_arg (gimple stmt)
2503 if (gimple_code (stmt) == GIMPLE_PHI)
2504 return degenerate_phi_result (stmt);
2505 else if (gimple_assign_single_p (stmt))
2506 return gimple_assign_rhs1 (stmt);
2507 else
2508 gcc_unreachable ();
2512 /* Given a statement STMT, which is either a PHI node or an assignment,
2513 return the "lhs" of the node. */
2515 static tree
2516 get_lhs_or_phi_result (gimple stmt)
2518 if (gimple_code (stmt) == GIMPLE_PHI)
2519 return gimple_phi_result (stmt);
2520 else if (is_gimple_assign (stmt))
2521 return gimple_assign_lhs (stmt);
2522 else
2523 gcc_unreachable ();
2526 /* Propagate RHS into all uses of LHS (when possible).
2528 RHS and LHS are derived from STMT, which is passed in solely so
2529 that we can remove it if propagation is successful.
2531 When propagating into a PHI node or into a statement which turns
2532 into a trivial copy or constant initialization, set the
2533 appropriate bit in INTERESTING_NAMEs so that we will visit those
2534 nodes as well in an effort to pick up secondary optimization
2535 opportunities. */
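/* E.g. for STMT x_1 = y_2; every use of x_1 is rewritten to use y_2;
   if all uses are replaced, STMT itself is removed (a sketch). */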
2537 static void
2538 propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
2540 /* First verify that propagation is valid and isn't going to move a
2541 loop variant variable outside its loop. */
2542 if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
2543 && (TREE_CODE (rhs) != SSA_NAME
2544 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2545 && may_propagate_copy (lhs, rhs)
2546 && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
2548 use_operand_p use_p;
2549 imm_use_iterator iter;
2550 gimple use_stmt;
2551 bool all = true;
2553 /* Dump details. */
2554 if (dump_file && (dump_flags & TDF_DETAILS))
2556 fprintf (dump_file, " Replacing '");
2557 print_generic_expr (dump_file, lhs, dump_flags);
2558 fprintf (dump_file, "' with %s '",
2559 (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
2560 print_generic_expr (dump_file, rhs, dump_flags);
2561 fprintf (dump_file, "'\n");
2564 /* Walk over every use of LHS and try to replace the use with RHS.
2565 At this point the only reason why such a propagation would not
2566 be successful would be if the use occurs in an ASM_EXPR. */
2567 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2569 /* Leave debug stmts alone. If we succeed in propagating
2570 all non-debug uses, we'll drop the DEF, and propagation
2571 into debug stmts will occur then. */
2572 if (gimple_debug_bind_p (use_stmt))
2573 continue;
2575 /* It's not always safe to propagate into an ASM_EXPR. */
2576 if (gimple_code (use_stmt) == GIMPLE_ASM
2577 && ! may_propagate_copy_into_asm (lhs))
2579 all = false;
2580 continue;
2583 /* It's not ok to propagate into the definition stmt of RHS.
2584 <bb 9>:
2585 # prephitmp.12_36 = PHI <g_67.1_6(9)>
2586 g_67.1_6 = prephitmp.12_36;
2587 goto <bb 9>;
2588 While this is strictly all dead code, we do not want to
2589 deal with it here. */
2590 if (TREE_CODE (rhs) == SSA_NAME
2591 && SSA_NAME_DEF_STMT (rhs) == use_stmt)
2593 all = false;
2594 continue;
2597 /* Dump details. */
2598 if (dump_file && (dump_flags & TDF_DETAILS))
2600 fprintf (dump_file, " Original statement:");
2601 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2604 /* Propagate the RHS into this use of the LHS. */
2605 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2606 propagate_value (use_p, rhs);
2608 /* Special cases to avoid useless calls into the folding
2609 routines, operand scanning, etc.
2611 First, propagation into a PHI may cause the PHI to become
2612 a degenerate, so mark the PHI as interesting. No other
2613 actions are necessary.
2615 Second, if we're propagating a virtual operand and the
2616 propagation does not change the underlying _DECL node for
2617 the virtual operand, then no further actions are necessary. */
2618 if (gimple_code (use_stmt) == GIMPLE_PHI
2619 || (! is_gimple_reg (lhs)
2620 && TREE_CODE (rhs) == SSA_NAME
2621 && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
2623 /* Dump details. */
2624 if (dump_file && (dump_flags & TDF_DETAILS))
2626 fprintf (dump_file, " Updated statement:");
2627 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2630 /* Propagation into a PHI may expose new degenerate PHIs,
2631 so mark the result of the PHI as interesting. */
2632 if (gimple_code (use_stmt) == GIMPLE_PHI)
2634 tree result = get_lhs_or_phi_result (use_stmt);
2635 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2638 continue;
2641 /* From this point onward we are propagating into a
2642 real statement. Folding may (or may not) be possible,
2643 we may expose new operands, expose dead EH edges,
2644 etc. */
2645 /* NOTE tuples. In the tuples world, fold_stmt_inplace
2646 cannot fold a call that simplifies to a constant,
2647 because the GIMPLE_CALL must be replaced by a
2648 GIMPLE_ASSIGN, and there is no way to effect such a
2649 transformation in-place. We might want to consider
2650 using the more general fold_stmt here. */
2652 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
2653 fold_stmt_inplace (&gsi);
2656 /* Sometimes propagation can expose new operands to the
2657 renamer. */
2658 update_stmt (use_stmt);
2660 /* Dump details. */
2661 if (dump_file && (dump_flags & TDF_DETAILS))
2663 fprintf (dump_file, " Updated statement:");
2664 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2667 /* If we replaced a variable index with a constant, then
2668 we would need to update the invariant flag for ADDR_EXPRs. */
2669 if (gimple_assign_single_p (use_stmt)
2670 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
2671 recompute_tree_invariant_for_addr_expr
2672 (gimple_assign_rhs1 (use_stmt));
2674 /* If we cleaned up EH information from the statement,
2675 mark its containing block as needing EH cleanups. */
2676 if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
2678 bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
2679 if (dump_file && (dump_flags & TDF_DETAILS))
2680 fprintf (dump_file, " Flagged to clear EH edges.\n");
2683 /* Propagation may expose new trivial copy/constant propagation
2684 opportunities. */
2685 if (gimple_assign_single_p (use_stmt)
2686 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
2687 && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
2688 || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
2690 tree result = get_lhs_or_phi_result (use_stmt);
2691 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2694 /* Propagation into these nodes may make certain edges in
2695 the CFG unexecutable. We want to identify them, as PHI nodes
2696 at the destination of those unexecutable edges may become
2697 degenerates.
2698 else if (gimple_code (use_stmt) == GIMPLE_COND
2699 || gimple_code (use_stmt) == GIMPLE_SWITCH
2700 || gimple_code (use_stmt) == GIMPLE_GOTO)
2702 tree val;
2704 if (gimple_code (use_stmt) == GIMPLE_COND)
2705 val = fold_binary_loc (gimple_location (use_stmt),
2706 gimple_cond_code (use_stmt),
2707 boolean_type_node,
2708 gimple_cond_lhs (use_stmt),
2709 gimple_cond_rhs (use_stmt));
2710 else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
2711 val = gimple_switch_index (use_stmt);
2712 else
2713 val = gimple_goto_dest (use_stmt);
2715 if (val && is_gimple_min_invariant (val))
2717 basic_block bb = gimple_bb (use_stmt);
2718 edge te = find_taken_edge (bb, val);
2719 edge_iterator ei;
2720 edge e;
2721 gimple_stmt_iterator gsi, psi;
2723 /* Remove all outgoing edges except TE. */
2724 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
2726 if (e != te)
2728 /* Mark all the PHI nodes at the destination of
2729 the unexecutable edge as interesting. */
2730 for (psi = gsi_start_phis (e->dest);
2731 !gsi_end_p (psi);
2732 gsi_next (&psi))
2734 gimple phi = gsi_stmt (psi);
2736 tree result = gimple_phi_result (phi);
2737 int version = SSA_NAME_VERSION (result);
2739 bitmap_set_bit (interesting_names, version);
2742 te->probability += e->probability;
2744 te->count += e->count;
2745 remove_edge (e);
2746 cfg_altered = true;
2748 else
2749 ei_next (&ei);
2752 gsi = gsi_last_bb (gimple_bb (use_stmt));
2753 gsi_remove (&gsi, true);
2755 /* And fixup the flags on the single remaining edge. */
2756 te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
2757 te->flags &= ~EDGE_ABNORMAL;
2758 te->flags |= EDGE_FALLTHRU;
2759 if (te->probability > REG_BR_PROB_BASE)
2760 te->probability = REG_BR_PROB_BASE;
2765 /* Ensure there is nothing else to do. */
2766 gcc_assert (!all || has_zero_uses (lhs));
2768 /* If we were able to propagate away all uses of LHS, then
2769 we can remove STMT. */
2770 if (all)
2771 remove_stmt_or_phi (stmt);
2775 /* STMT is either a PHI node (potentially a degenerate PHI node) or
2776 a statement that is a trivial copy or constant initialization.
2778 Attempt to eliminate STMT by propagating its RHS into all uses of
2779 its LHS. This may in turn set new bits in INTERESTING_NAMES
2780 for nodes we want to revisit later.
2782 All exit paths should clear INTERESTING_NAMES for the result
2783 of STMT. */
2785 static void
2786 eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
2788 tree lhs = get_lhs_or_phi_result (stmt);
2789 tree rhs;
2790 int version = SSA_NAME_VERSION (lhs);
2792 /* If the LHS of this statement or PHI has no uses, then we can
2793 just eliminate it. This can occur if, for example, the PHI
2794 was created by block duplication due to threading and its only
2795 use was in the conditional at the end of the block which was
2796 deleted. */
2797 if (has_zero_uses (lhs))
2799 bitmap_clear_bit (interesting_names, version);
2800 remove_stmt_or_phi (stmt);
2801 return;
2804 /* Get the RHS of the assignment or PHI node if the PHI is a
2805 degenerate. */
2806 rhs = get_rhs_or_phi_arg (stmt);
2807 if (!rhs)
2809 bitmap_clear_bit (interesting_names, version);
2810 return;
2813 propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
2815 /* Note that STMT may well have been deleted by now, so do
2816 not access it; instead use the saved version number to clear
2817 STMT's entry in the worklist. */
2818 bitmap_clear_bit (interesting_names, version);
2821 /* The first phase in degenerate PHI elimination.
2823 Eliminate the degenerate PHIs in BB, then recurse on the
2824 dominator children of BB. */
2826 static void
2827 eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
2829 gimple_stmt_iterator gsi;
2830 basic_block son;
2832 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2834 gimple phi = gsi_stmt (gsi);
2836 eliminate_const_or_copy (phi, interesting_names);
2839 /* Recurse into the dominator children of BB. */
2840 for (son = first_dom_son (CDI_DOMINATORS, bb);
2841 son;
2842 son = next_dom_son (CDI_DOMINATORS, son))
2843 eliminate_degenerate_phis_1 (son, interesting_names);
2847 /* A very simple pass to eliminate degenerate PHI nodes from the
2848 IL. This is meant to be fast enough to be run several
2849 times in the optimization pipeline.
2851 Certain optimizations, particularly those which duplicate blocks
2852 or remove edges from the CFG can create or expose PHIs which are
2853 trivial copies or constant initializations.
2855 While we could pick up these optimizations in DOM or with the
2856 combination of copy-prop and CCP, those solutions are far too
2857 heavy-weight for our needs.
2859 This implementation has two phases so that we can efficiently
2860 eliminate the first order degenerate PHIs and second order
2861 degenerate PHIs.
2863 The first phase performs a dominator walk to identify and eliminate
2864 the vast majority of the degenerate PHIs. When a degenerate PHI
2865 is identified and eliminated any affected statements or PHIs
2866 are put on a worklist.
2868 The second phase eliminates degenerate PHIs and trivial copies
2869 or constant initializations using the worklist. This is how we
2870 pick up the secondary optimization opportunities with minimal
2871 cost. */
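/* For instance (a sketch), eliminating the first order degenerate

     x_2 = PHI <y_1(2), y_1(3)>

   replaces x_2 with y_1 everywhere; if that turns some other PHI into

     z_4 = PHI <y_1(4), y_1(5)>

   it is a second order degenerate, picked up via the worklist. */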
2873 static unsigned int
2874 eliminate_degenerate_phis (void)
2876 bitmap interesting_names;
2877 bitmap interesting_names1;
2879 /* Bitmap of blocks which need EH information updated. We cannot
2880 update it on-the-fly as doing so invalidates the dominator tree. */
2881 need_eh_cleanup = BITMAP_ALLOC (NULL);
2883 /* INTERESTING_NAMES is effectively our worklist, indexed by
2884 SSA_NAME_VERSION.
2886 A set bit indicates that the statement or PHI node which
2887 defines the SSA_NAME should be (re)examined to determine if
2888 it has become a degenerate PHI or trivial const/copy propagation
2889 opportunity.
2891 Experiments have shown we generally get better compilation
2892 time behavior with bitmaps rather than sbitmaps. */
2893 interesting_names = BITMAP_ALLOC (NULL);
2894 interesting_names1 = BITMAP_ALLOC (NULL);
2896 calculate_dominance_info (CDI_DOMINATORS);
2897 cfg_altered = false;
2899 /* First phase. Eliminate degenerate PHIs via a dominator
2900 walk of the CFG.
2902 Experiments have indicated that we generally get better
2903 compile-time behavior by visiting blocks in the first
2904 phase in dominator order. Presumably this is because walking
2905 in dominator order leaves fewer PHIs for later examination
2906 by the worklist phase. */
2907 eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
2909 /* Second phase. Eliminate second order degenerate PHIs as well
2910 as trivial copies or constant initializations identified by
2911 the first phase or this phase. Basically we keep iterating
2912 until our set of INTERESTING_NAMEs is empty. */
2913 while (!bitmap_empty_p (interesting_names))
2915 unsigned int i;
2916 bitmap_iterator bi;
2918 /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
2919 changed during the loop. Copy it to another bitmap and
2920 use that. */
2921 bitmap_copy (interesting_names1, interesting_names);
2923 EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
2925 tree name = ssa_name (i);
2927 /* Ignore SSA_NAMEs that have been released because
2928 their defining statement was deleted (unreachable). */
2929 if (name)
2930 eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
2931 interesting_names);
2935 if (cfg_altered)
2936 free_dominance_info (CDI_DOMINATORS);
2938 /* Propagation of const and copies may make some EH edges dead. Purge
2939 such edges from the CFG as needed. */
2940 if (!bitmap_empty_p (need_eh_cleanup))
2942 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
2943 BITMAP_FREE (need_eh_cleanup);
2946 BITMAP_FREE (interesting_names);
2947 BITMAP_FREE (interesting_names1);
2948 return 0;
2951 struct gimple_opt_pass pass_phi_only_cprop =
2954 GIMPLE_PASS,
2955 "phicprop", /* name */
2956 gate_dominator, /* gate */
2957 eliminate_degenerate_phis, /* execute */
2958 NULL, /* sub */
2959 NULL, /* next */
2960 0, /* static_pass_number */
2961 TV_TREE_PHI_CPROP, /* tv_id */
2962 PROP_cfg | PROP_ssa, /* properties_required */
2963 0, /* properties_provided */
2964 0, /* properties_destroyed */
2965 0, /* todo_flags_start */
2966 TODO_cleanup_cfg
2967 | TODO_ggc_collect
2968 | TODO_verify_ssa
2969 | TODO_verify_stmts
2970 | TODO_update_ssa /* todo_flags_finish */