/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "real.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"
/* This file implements optimizations on the dominator tree.  */

/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */

enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_CALL
};
struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0; tree opnd1; } binary;
    struct { tree fn; bool pure; size_t nargs; tree *args; } call;
  } ops;
};
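/* For example, an assignment such as "x_3 = y_1 + z_2" is recorded
   with kind EXPR_BINARY, ops.binary.op PLUS_EXPR, and the SSA names
   y_1 and z_2 as ops.binary.opnd0 and ops.binary.opnd1.  */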
/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

struct cond_equivalence
{
  struct hashable_expr cond;
  tree value;
};
/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  The number of recorded conditions can vary, but
     can be determined by the condition's code.  So we have an array
     and its maximum index rather than use a varray.  */
  struct cond_equivalence *cond_equivalences;
  unsigned int max_cond_equivalences;
};
/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;
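/* For example, once "x_3 = a_1 + b_2" has been entered into the table,
   a dominated statement "y_4 = a_1 + b_2" can be rewritten as
   "y_4 = x_3".  */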
/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;
DEF_VEC_P(expr_hash_elt_t);
DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);

static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;
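/* For example, if block B records expressions E1 and E2, the stack holds
   [... NULL E1 E2]; finalizing B pops E2 and E1 (removing them from
   AVAIL_EXPRS) and stops at the NULL marker.  */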
/* Stack of statements we need to rescan during finalization for newly
   exposed variables.

   Statement rescanning must occur after the current block's available
   expressions are removed from AVAIL_EXPRS.  Else we may change the
   hash code for an expression and be unable to find/remove it from
   AVAIL_EXPRS.  */
typedef gimple *gimple_p;
DEF_VEC_P(gimple_p);
DEF_VEC_ALLOC_P(gimple_p,heap);

static VEC(gimple_p,heap) *stmts_to_rescan;
/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};
/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;
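/* See record_const_or_copy_1 and restore_vars_to_original_value below:
   the former pushes the previous value followed by the destination name,
   so the latter can pop the name and then restore its previous
   SSA_NAME_VALUE.  */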
/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;
/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;
/* Local functions.  */
static void optimize_stmt (struct dom_walk_data *,
                           basic_block,
                           gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (struct cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static bool eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
static void propagate_to_outgoing_edges (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      expr->type = NULL_TREE;

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn = gimple_call_fn (stmt);

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = (tree *) xcalloc (nargs, sizeof (tree));
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}
/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Compare two hashable_expr structures for equivalence.
   They are considered equivalent when the expressions
   they denote must necessarily be equal.  The logic is intended
   to follow that of operand_equal_p in fold-const.c  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      {
        if (expr0->ops.binary.op != expr1->ops.binary.op)
          return false;

        if (operand_equal_p (expr0->ops.binary.opnd0,
                             expr1->ops.binary.opnd0, 0)
            && operand_equal_p (expr0->ops.binary.opnd1,
                                expr1->ops.binary.opnd1, 0))
          return true;

        /* For commutative ops, allow the other order.  */
        return (commutative_tree_code (expr0->ops.binary.op)
                && operand_equal_p (expr0->ops.binary.opnd0,
                                    expr1->ops.binary.opnd1, 0)
                && operand_equal_p (expr0->ops.binary.opnd1,
                                    expr1->ops.binary.opnd0, 0));
      }

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (! operand_equal_p (expr0->ops.call.fn,
                               expr1->ops.call.fn, 0))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */

static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;

        val = iterative_hash_object (code, val);
        val = iterative_hash_expr (expr->ops.call.fn, val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}
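/* Note that operands of commutative operators are hashed
   order-insensitively, so that e.g. "a_1 + b_2" and "b_2 + a_1" receive
   the same hash value, matching the equality test in
   hashable_expr_equal_p.  */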
/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;

        print_generic_expr (stream, element->expr.ops.call.fn, 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, " ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}
/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);

  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);

  free (element);
}
/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}
/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              if (edge_info->cond_equivalences)
                free (edge_info->cond_equivalences);
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}
/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;
  unsigned int i;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free_expr_hash_elt);
  avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  stmts_to_rescan = VEC_alloc (gimple_p, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.walk_stmts_backward = false;
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
  walk_data.before_dom_children_walk_stmts = optimize_stmt;
  walk_data.before_dom_children_after_stmts = propagate_to_outgoing_edges;
  walk_data.after_dom_children_before_stmts = NULL;
  walk_data.after_dom_children_walk_stmts = NULL;
  walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;
  walk_data.interesting_blocks = NULL;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (single_succ_p (bb)
              && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            {
              bitmap_clear_bit (need_eh_cleanup, i);
              bitmap_set_bit (need_eh_cleanup, single_succ (bb)->index);
            }
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  /* Finally, remove everything except invariants in SSA_NAME_VALUE.

     Long term we will be able to let everything in SSA_NAME_VALUE
     persist.  However, for now, we know this is the safe thing to do.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      tree value;

      if (!name)
        continue;

      value = SSA_NAME_VALUE (name);
      if (value && !is_gimple_min_invariant (value))
        SSA_NAME_VALUE (name) = NULL;
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);
  VEC_free (gimple_p, heap, stmts_to_rescan);

  return 0;
}
static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}

struct gimple_opt_pass pass_dominator =
{
 {
  GIMPLE_PASS,
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_cleanup_cfg
    | TODO_verify_ssa                   /* todo_flags_finish */
 }
};
/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */

static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}
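/* For example, a condition written as "if (5 > x_2)" is canonicalized
   to "if (x_2 < 5)", placing the constant as the second operand so
   that later hash table lookups see a single form of the test.  */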
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

static void
dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                          basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);
}
/* Pop entries off AVAIL_EXPRS_STACK and remove the corresponding
   expressions from the global hash table AVAIL_EXPRS, stopping when
   the marker for the current block is reached.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
    {
      struct expr_hash_elt element;
      expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);

      if (victim == NULL)
        break;

      element = *victim;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, &element);
        }

      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
    }
}
/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = VEC_pop (tree, const_and_copies_stack);
      SSA_NAME_VALUE (dest) = prev_value;
    }
}
/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}
/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

static void
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  if (! walk_data->global_data)
    {
      gimple dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);
      walk_data->global_data = dummy_cond;
    }

  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
}
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = (struct edge_info *) true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          dom_thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
        }

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
          edge_info = (struct edge_info *) false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
                for (i = 0; i < edge_info->max_cond_equivalences; i++)
                  record_cond (&cond_equivalences[i]);
            }

          /* Now thread the edge.  */
          dom_thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();

  /* If we queued any statements to rescan in this block, then
     go ahead and rescan them now.  */
  while (VEC_length (gimple_p, stmts_to_rescan) > 0)
    {
      gimple *stmt_p = VEC_last (gimple_p, stmts_to_rescan);
      gimple stmt = *stmt_p;
      basic_block stmt_bb = gimple_bb (stmt);

      if (stmt_bb != bb)
        break;

      VEC_pop (gimple_p, stmts_to_rescan);
      pop_stmt_changes (stmt_p);
    }
}
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */

static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        SSA_NAME_VALUE (lhs) = rhs;
    }
}
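/* For example, the PHI "x_3 = PHI <y_4(2), y_4(3)>" makes x_3
   equivalent to y_4, so SSA_NAME_VALUE (x_3) is set to y_4.  */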
/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}
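/* For a typical loop header this returns the preheader edge: the latch's
   back edge is skipped because its source is dominated by the header
   itself.  */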
/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;

          if (lhs)
            record_equality (lhs, rhs);

          if (cond_equivalences)
            for (i = 0; i < edge_info->max_cond_equivalences; i++)
              record_cond (&cond_equivalences[i]);
        }
    }
}
/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements: %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, " avail_exprs: ");
  htab_statistics (file, avail_exprs);
}
/* Dump SSA statistics on stderr.  */

void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}
/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}
/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (struct cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
    }
  else
    free (element);
}
/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           struct cond_equivalence *p)
{
  struct hashable_expr *cond = &p->cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  p->value = boolean_true_node;
}
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 6;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 6);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[5]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 3;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 3);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[2]);
        }
      else
        {
          edge_info->max_cond_equivalences = 2;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          edge_info->max_cond_equivalences = 5;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 5);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences[4]);
        }
      else
        {
          edge_info->max_cond_equivalences = 4;
          edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNORDERED_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 8);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[5]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[7]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case UNEQ_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    case LTGT_EXPR:
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[2]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences[3]);
      break;

    default:
      edge_info->max_cond_equivalences = 2;
      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &edge_info->cond_equivalences[0].cond);
  edge_info->cond_equivalences[0].value = boolean_true_node;

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &edge_info->cond_equivalences[1].cond);
  edge_info->cond_equivalences[1].value = boolean_false_node;
}
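/* For example, on the true edge of "if (a_1 < b_2)" with integral
   operands this records four equivalences: a_1 < b_2 is true, the
   inverted condition a_1 >= b_2 is false, and the dominated conditions
   a_1 <= b_2 and a_1 != b_2 are true.  */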
/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  SSA_NAME_VALUE (x) = y;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}
/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

static int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}
/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

static bool
simple_iv_increment_p (gimple stmt)
{
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  if (gimple_assign_rhs_code (stmt) != PLUS_EXPR
      && gimple_assign_rhs_code (stmt) != MINUS_EXPR)
    return false;

  preinc = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */

static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }
    }
}
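/* For example, if SSA_NAME_VALUE (a_1) is the constant 5, then a PHI
   argument a_1 associated with an outgoing edge of BB is replaced by 5
   in the successor's PHI node.  */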
/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert (TREE_TYPE (index), CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue (cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (code == NE_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }
          else if (TREE_CODE (op0) == SSA_NAME
                   && (is_gimple_min_invariant (op1)
                       || TREE_CODE (op1) == SSA_NAME))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue (cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (TREE_CODE (cond) == NE_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }
        }

      /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
    }
}
/* Propagate information from BB to its outgoing edges.

   This can include equivalence information implied by control statements
   at the end of BB and const/copy propagation into PHIs in BB's
   successor blocks.  */

static void
propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                             basic_block bb)
{
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}
/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.  */

static bool
eliminate_redundant_computations (gimple_stmt_iterator* gsi)
{
  tree expr_type;
  tree cached_lhs;
  bool insert = true;
  bool retval = false;
  bool assigns_var_p = false;

  gimple stmt = gsi_stmt (*gsi);

  tree def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but can not
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    expr_type = TREE_TYPE (gimple_switch_index (stmt));
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return false;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
    {
#if defined ENABLE_CHECKING
      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
                  || is_gimple_min_invariant (cached_lhs));
#endif

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced redundant expr '");
          print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

      if (TREE_CODE (cached_lhs) == ADDR_EXPR
          || (POINTER_TYPE_P (expr_type)
              && is_gimple_min_invariant (cached_lhs)))
        retval = true;

      if (assigns_var_p
          && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
    }
  return retval;
}
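
/* An illustrative example of the redundancy elimination above: if the
   dominator walk has already recorded

     t_2 = a_1 + b_3;

   then a dominated occurrence

     u_4 = a_1 + b_3;

   finds t_2 as the cached LHS and is rewritten into the trivial copy
   "u_4 = t_2;" by propagate_tree_value_into_stmt.  */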
/* Return true if statement GS is an assignment that performs a useless
   type conversion.  It is intended to be a tuples analog of function
   tree_ssa_useless_type_conversion.  */

static bool
gimple_assign_unary_useless_conversion_p (gimple gs)
{
  if (is_gimple_assign (gs)
      && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
          || gimple_assign_rhs_code (gs) == VIEW_CONVERT_EXPR
          || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR))
    {
      tree lhs_type = TREE_TYPE (gimple_assign_lhs (gs));
      tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (gs));
      return useless_type_conversion_p (lhs_type, rhs_type);
    }

  return false;
}
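
/* As an illustrative example (assuming "typedef int myint;"), the
   assignment

     x_2 = (myint) y_1;

   converts between types with the same representation and semantics,
   so useless_type_conversion_p holds and x_2 is effectively a plain
   copy of y_1.  */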
/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.  */
/* We handle only very simple copy equivalences here.  The heavy
   lifting is done by eliminate_redundant_computations.  */

static void
record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
{
  tree lhs;
  enum tree_code lhs_code;

  gcc_assert (is_gimple_assign (stmt));

  lhs = gimple_assign_lhs (stmt);
  lhs_code = TREE_CODE (lhs);

  if (lhs_code == SSA_NAME
      && (gimple_assign_single_p (stmt)
          || gimple_assign_unary_useless_conversion_p (stmt)))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      /* Strip away any useless type conversions.  */
      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If the RHS of the assignment is a constant or another variable that
         may be propagated, register it in the CONST_AND_COPIES table.  We
         do not need to record unwind data for this, since this is a true
         assignment and not an equivalence inferred from a comparison.  All
         uses of this ssa name are dominated by this assignment, so unwinding
         just costs time and space.  */
      if (may_optimize_p
          && (TREE_CODE (rhs) == SSA_NAME
              || is_gimple_min_invariant (rhs)))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "==== ASGN ");
              print_generic_expr (dump_file, lhs, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, rhs, 0);
              fprintf (dump_file, "\n");
            }

          SSA_NAME_VALUE (lhs) = rhs;
        }
    }

  /* A memory store, even an aliased store, creates a useful
     equivalence.  By exchanging the LHS and RHS, creating suitable
     vops and recording the result in the available expression table,
     we may be able to expose more redundant loads.  */
  if (!gimple_has_volatile_ops (stmt)
      && gimple_references_memory_p (stmt)
      && gimple_assign_single_p (stmt)
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
      && !is_gimple_reg (lhs))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      gimple new_stmt;

      /* Build a new statement with the RHS and LHS exchanged.  */
      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* NOTE tuples.  The call to gimple_build_assign below replaced
             a call to build_gimple_modify_stmt, which did not set the
             SSA_NAME_DEF_STMT on the LHS of the assignment.  Doing so
             may cause an SSA validation failure, as the LHS may be a
             default-initialized name and should have no definition.  I'm
             a bit dubious of this, as the artificial statement that we
             generate here may in fact be ill-formed, but it is simply
             used as an internal device in this pass, and never becomes
             part of the CFG.  */
          gimple defstmt = SSA_NAME_DEF_STMT (rhs);
          new_stmt = gimple_build_assign (rhs, lhs);
          SSA_NAME_DEF_STMT (rhs) = defstmt;
        }
      else
        new_stmt = gimple_build_assign (rhs, lhs);

      create_ssa_artificial_load_stmt (new_stmt, stmt, true);

      /* Finally enter the statement into the available expression
         table.  */
      lookup_avail_expr (new_stmt, true);
    }
}
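
/* An illustrative example of the store equivalence above: after

     *p_1 = x_2;

   recording the swapped form "x_2 = *p_1" as an available expression
   allows a later load "y_3 = *p_1" (with the same VUSEs, i.e. no
   intervening clobber of *p_1) to be replaced by the copy
   "y_3 = x_2".  */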
/* Replace *OP_P in STMT with any known equivalent value for *OP_P from
   CONST_AND_COPIES.  */

static bool
cprop_operand (gimple stmt, use_operand_p op_p)
{
  bool may_have_exposed_new_symbols = false;
  tree val;
  tree op = USE_FROM_PTR (op_p);

  /* If the operand has a known constant value or it is known to be a
     copy of some other variable, use the value or copy stored in
     CONST_AND_COPIES.  */
  val = SSA_NAME_VALUE (op);
  if (val && val != op)
    {
      tree op_type, val_type;

      /* Do not change the base variable in the virtual operand
         tables.  That would make it impossible to reconstruct
         the renamed virtual operand if we later modify this
         statement.  Also only allow the new value to be an SSA_NAME
         for propagation into virtual operands.  */
      if (!is_gimple_reg (op)
          && (TREE_CODE (val) != SSA_NAME
              || is_gimple_reg (val)
              || get_virtual_var (val) != get_virtual_var (op)))
        return false;

      /* Do not replace hard register operands in asm statements.  */
      if (gimple_code (stmt) == GIMPLE_ASM
          && !may_propagate_copy_into_asm (op))
        return false;

      /* Get the toplevel type of each operand.  */
      op_type = TREE_TYPE (op);
      val_type = TREE_TYPE (val);

      /* While both types are pointers, get the type of the object
         pointed to.  */
      while (POINTER_TYPE_P (op_type) && POINTER_TYPE_P (val_type))
        {
          op_type = TREE_TYPE (op_type);
          val_type = TREE_TYPE (val_type);
        }

      /* Make sure underlying types match before propagating a constant by
         converting the constant to the proper type.  Note that convert may
         return a non-gimple expression, in which case we ignore this
         propagation opportunity.  */
      if (TREE_CODE (val) != SSA_NAME)
        {
          if (!useless_type_conversion_p (op_type, val_type))
            {
              val = fold_convert (TREE_TYPE (op), val);
              if (!is_gimple_min_invariant (val))
                return false;
            }
        }

      /* Certain operands are not allowed to be copy propagated due
         to their interaction with exception handling and some GCC
         extensions.  */
      else if (!may_propagate_copy (op, val))
        return false;

      /* Do not propagate copies if the propagated value is at a deeper loop
         depth than the propagatee.  Otherwise, this may move loop variant
         variables outside of their loops and prevent coalescing
         opportunities.  If the value was loop invariant, it will be hoisted
         by LICM and exposed for copy propagation.  */
      if (loop_depth_of_name (val) > loop_depth_of_name (op))
        return false;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, op, dump_flags);
          fprintf (dump_file, "' with %s '",
                   (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
          print_generic_expr (dump_file, val, dump_flags);
          fprintf (dump_file, "'\n");
        }

      /* If VAL is an ADDR_EXPR or a constant of pointer type, note
         that we may have exposed a new symbol for SSA renaming.  */
      if (TREE_CODE (val) == ADDR_EXPR
          || (POINTER_TYPE_P (TREE_TYPE (op))
              && is_gimple_min_invariant (val)))
        may_have_exposed_new_symbols = true;

      if (TREE_CODE (val) != SSA_NAME)
        opt_stats.num_const_prop++;
      else
        opt_stats.num_copy_prop++;

      propagate_value (op_p, val);

      /* And note that we modified this statement.  This is now
         safe, even if we changed virtual operands since we will
         rescan the statement and rewrite its operands again.  */
      gimple_set_modified (stmt, true);
    }
  return may_have_exposed_new_symbols;
}
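
/* An illustrative example: if CONST_AND_COPIES records that a_1 has
   the value 5, then the use of a_1 in

     b_2 = a_1 + 3;

   is replaced, yielding "b_2 = 5 + 3", which later folding reduces
   to "b_2 = 8".  */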
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the uses, vuses and
   vdef_ops of STMT.  */

static bool
cprop_into_stmt (gimple stmt)
{
  bool may_have_exposed_new_symbols = false;
  use_operand_p op_p;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
    {
      if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
        may_have_exposed_new_symbols |= cprop_operand (stmt, op_p);
    }

  return may_have_exposed_new_symbols;
}
/* Optimize the statement pointed to by iterator SI.

   We try to perform some simplistic global redundancy elimination and
   constant propagation:

   1- To detect global redundancy, we keep track of expressions that have
      been computed in this block and its dominators.  If we find that the
      same expression is computed more than once, we eliminate repeated
      computations by using the target of the first one.

   2- Constant values and copy assignments.  This is used to do very
      simplistic constant and copy propagation.  When a constant or copy
      assignment is found, we map the value on the RHS of the assignment to
      the variable in the LHS in the CONST_AND_COPIES table.  */

static void
optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
               basic_block bb, gimple_stmt_iterator si)
{
  gimple stmt, old_stmt;
  bool may_optimize_p;
  bool may_have_exposed_new_symbols;
  bool modified_p = false;

  old_stmt = stmt = gsi_stmt (si);

  if (gimple_code (stmt) == GIMPLE_COND)
    canonicalize_comparison (stmt);

  update_stmt_if_modified (stmt);
  opt_stats.num_stmts++;
  push_stmt_changes (gsi_stmt_ptr (&si));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Optimizing statement ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  /* Const/copy propagate into USES, VUSES and the RHS of VDEFs.  */
  may_have_exposed_new_symbols = cprop_into_stmt (stmt);

  /* If the statement has been modified with constant replacements,
     fold its RHS before checking for redundant computations.  */
  if (gimple_modified_p (stmt))
    {
      tree rhs = NULL;

      /* Try to fold the statement making sure that STMT is kept
         up to date.  */
      if (fold_stmt (&si))
        {
          stmt = gsi_stmt (si);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Folded to: ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
            }
        }

      /* We only need to consider cases that can yield a gimple operand.  */
      if (gimple_assign_single_p (stmt))
        rhs = gimple_assign_rhs1 (stmt);
      else if (gimple_code (stmt) == GIMPLE_GOTO)
        rhs = gimple_goto_dest (stmt);
      else if (gimple_code (stmt) == GIMPLE_SWITCH)
        /* This should never be an ADDR_EXPR.  */
        rhs = gimple_switch_index (stmt);

      if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
        recompute_tree_invariant_for_addr_expr (rhs);

      /* Constant/copy propagation above may change the set of
         virtual operands associated with this statement.  Folding
         may remove the need for some virtual operands.

         Indicate we will need to rescan and rewrite the statement.  */
      may_have_exposed_new_symbols = true;
      /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
         even if fold_stmt updated the stmt already and thus cleared
         gimple_modified_p flag on it.  */
      modified_p = true;
    }

  /* Check for redundant computations.  Do this optimization only
     for assignments, calls with a LHS, conditionals and switches
     that have no volatile ops.  */
  may_optimize_p = (!gimple_has_volatile_ops (stmt)
                    && ((is_gimple_assign (stmt)
                         && !gimple_rhs_has_side_effects (stmt))
                        || (is_gimple_call (stmt)
                            && gimple_call_lhs (stmt) != NULL_TREE
                            && !gimple_rhs_has_side_effects (stmt))
                        || gimple_code (stmt) == GIMPLE_COND
                        || gimple_code (stmt) == GIMPLE_SWITCH));

  if (may_optimize_p)
    {
      may_have_exposed_new_symbols |= eliminate_redundant_computations (&si);
      stmt = gsi_stmt (si);
    }

  /* Record any additional equivalences created by this statement.  */
  if (is_gimple_assign (stmt))
    record_equivalences_from_stmt (stmt, may_optimize_p);

  /* If STMT is a GIMPLE_COND and it was modified, then we may know
     where it goes.  If that is the case, then mark the CFG as altered.

     This will cause us to later call remove_unreachable_blocks and
     cleanup_tree_cfg when it is safe to do so.  It is not safe to
     clean things up here since removal of edges and such can trigger
     the removal of PHI nodes, which in turn can release SSA_NAMEs to
     the manager.

     That's all fine and good, except that once SSA_NAMEs are released
     to the manager, we must not call create_ssa_name until all references
     to released SSA_NAMEs have been eliminated.

     All references to the deleted SSA_NAMEs can not be eliminated until
     we remove unreachable blocks.

     We can not remove unreachable blocks until after we have completed
     any queued jump threading.

     We can not complete any queued jump threads until we have taken
     appropriate variables out of SSA form.  Taking variables out of
     SSA form can call create_ssa_name and thus we lose.

     Ultimately I suspect we're going to need to change the interface
     into the SSA_NAME manager.  */
  if (gimple_modified_p (stmt) || modified_p)
    {
      tree val = NULL;

      if (gimple_code (stmt) == GIMPLE_COND)
        val = fold_binary (gimple_cond_code (stmt), boolean_type_node,
                           gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
      else if (gimple_code (stmt) == GIMPLE_SWITCH)
        val = gimple_switch_index (stmt);

      if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
        cfg_altered = true;

      /* If we simplified a statement in such a way as to be shown that it
         cannot trap, update the eh information and the cfg to match.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
        {
          bitmap_set_bit (need_eh_cleanup, bb->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Flagged to clear EH edges.\n");
        }
    }

  if (may_have_exposed_new_symbols)
    {
      /* Queue the statement to be re-scanned after all the
         AVAIL_EXPRS have been processed.  The change buffer stack for
         all the pushed statements will be processed when this queue
         is emptied.  */
      VEC_safe_push (gimple_p, heap, stmts_to_rescan, gsi_stmt_ptr (&si));
    }
  else
    {
      /* Otherwise, just discard the recently pushed change buffer.  If
         not, the STMTS_TO_RESCAN queue will get out of synch with the
         change buffer stack.  */
      discard_stmt_changes (gsi_stmt_ptr (&si));
    }
}
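
/* Putting the pieces together, an illustrative example: given

     a_1 = 0;
     if (a_1 == 0) ...

   cprop_into_stmt rewrites the condition to "if (0 == 0)", fold_stmt
   reduces it to a constant, and find_taken_edge then lets us mark the
   CFG as altered so the unreachable path is removed later.  */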
/* Search for an existing instance of STMT in the AVAIL_EXPRS table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is
   also added to AVAIL_EXPRS_STACK, so that it can be removed when
   we finish processing this block and its children.  */

static tree
lookup_avail_expr (gimple stmt, bool insert)
{
  void **slot;
  tree lhs;
  tree temp;
  struct expr_hash_elt *element = XNEW (struct expr_hash_elt);

  /* Get LHS of assignment or call, else NULL_TREE.  */
  lhs = gimple_get_lhs (stmt);

  initialize_hash_element (stmt, lhs, element);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      print_expr_hash_elt (dump_file, element);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element->expr.kind == EXPR_SINGLE
      && (TREE_CODE (element->expr.ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element->expr.ops.single.rhs)))
    {
      free (element);
      return NULL_TREE;
    }

  /* Finally try to find the expression in the main expression hash table.  */
  slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
                                   (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      free (element);
      return NULL_TREE;
    }

  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "2>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
      return NULL_TREE;
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = ((struct expr_hash_elt *)*slot)->lhs;

  /* See if the LHS appears in the CONST_AND_COPIES table.  If it does, then
     use the value from the const_and_copies table.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      temp = SSA_NAME_VALUE (lhs);
      if (temp)
        lhs = temp;
    }

  free (element);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, "\n");
    }

  return lhs;
}
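
/* An illustrative example of the INSERT behavior: the first lookup of

     t_3 = x_1 + y_2;

   with INSERT set stores the expression and returns NULL_TREE; a later
   lookup of "u_7 = x_1 + y_2;" finds the entry and returns t_3, the
   name holding the previously computed value.  */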
/* Hashing and equality functions for AVAIL_EXPRS.  We compute a value number
   for expressions using the code of the expression and the SSA numbers of
   its operands.  */

static hashval_t
avail_expr_hash (const void *p)
{
  gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
  const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
  tree vuse;
  ssa_op_iter iter;
  hashval_t val = 0;

  val = iterative_hash_hashable_expr (expr, val);

  /* If the hash table entry is not associated with a statement, then we
     can just hash the expression and not worry about virtual operands
     and such.  */
  if (!stmt)
    return val;

  /* Add the SSA version numbers of every vuse operand.  This is important
     because compound variables like arrays are not renamed in the
     operands.  Rather, the rename is done on the virtual variable
     representing all the elements of the array.  */
  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
    val = iterative_hash_expr (vuse, val);

  return val;
}
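
/* Why the VUSEs matter, an illustrative sketch (the annotations are
   informal, not exact dump syntax):

     t_1 = a[i_2];    # VUSE <a_5>
     a[j_3] = 0;      # a_5 clobbered, new version a_6 created
     t_4 = a[i_2];    # VUSE <a_6>

   Both loads read a[i_2], but their VUSE version numbers differ, so
   they hash differently and the second load is correctly not treated
   as redundant with the first.  */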
static hashval_t
real_avail_expr_hash (const void *p)
{
  return ((const struct expr_hash_elt *)p)->hash;
}
static int
avail_expr_eq (const void *p1, const void *p2)
{
  gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
  const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
  const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
  gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
  const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
  const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  /* FIXME tuples:
     We add stmts to a hash table and then modify them.  To detect the case
     that we modify a stmt and then search for it, we assume that the hash
     is always modified by that change.
     We have to fully check why this doesn't happen on trunk or rewrite
     this in a more reliable (and easier to understand) way.  */
  if (((const struct expr_hash_elt *)p1)->hash
      != ((const struct expr_hash_elt *)p2)->hash)
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    {
      /* Note that STMT1 and/or STMT2 may be NULL.  */
      bool ret = compare_ssa_operands_equal (stmt1, stmt2, SSA_OP_VUSE);
      return ret;
    }

  return false;
}
/* PHI-ONLY copy and constant propagation.  This pass is meant to clean
   up degenerate PHIs created by or exposed by jump threading.  */

/* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
   NULL.  */

static tree
degenerate_phi_result (gimple phi)
{
  tree lhs = gimple_phi_result (phi);
  tree val = NULL;
  size_t i;

  /* Ignoring arguments which are the same as LHS, if all the remaining
     arguments are the same, then the PHI is a degenerate and has the
     value of that common argument.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (arg == lhs)
        continue;
      else if (!val)
        val = arg;
      else if (!operand_equal_p (arg, val, 0))
        break;
    }
  return (i == gimple_phi_num_args (phi) ? val : NULL);
}
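
/* An illustrative example: the PHI

     x_4 = PHI <x_2, x_2, x_4>

   is degenerate; ignoring the self-referencing argument x_4, every
   remaining argument is x_2, so x_2 is returned and x_4 can be
   replaced by it everywhere.  */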
/* Given a statement STMT, which is either a PHI node or an assignment,
   remove it from the IL.  */

static void
remove_stmt_or_phi (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);

  if (gimple_code (stmt) == GIMPLE_PHI)
    remove_phi_node (&gsi, true);
  else
    {
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}
/* Given a statement STMT, which is either a PHI node or an assignment,
   return the "rhs" of the node.  In the case of a non-degenerate
   PHI, NULL is returned.  */

static tree
get_rhs_or_phi_arg (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_PHI)
    return degenerate_phi_result (stmt);
  else if (gimple_assign_single_p (stmt))
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
/* Given a statement STMT, which is either a PHI node or an assignment,
   return the "lhs" of the node.  */

static tree
get_lhs_or_phi_result (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_PHI)
    return gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    return gimple_assign_lhs (stmt);
  else
    gcc_unreachable ();
}
/* Propagate RHS into all uses of LHS (when possible).

   RHS and LHS are derived from STMT, which is passed in solely so
   that we can remove it if propagation is successful.

   When propagating into a PHI node or into a statement which turns
   into a trivial copy or constant initialization, set the
   appropriate bit in INTERESTING_NAMEs so that we will visit those
   nodes as well in an effort to pick up secondary optimization
   opportunities.  */

static void
propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
{
  /* First verify that propagation is valid and isn't going to move a
     loop variant variable outside its loop.  */
  if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
      && (TREE_CODE (rhs) != SSA_NAME
          || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
      && may_propagate_copy (lhs, rhs)
      && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
    {
      use_operand_p use_p;
      imm_use_iterator iter;
      gimple use_stmt;
      bool all = true;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replacing '");
          print_generic_expr (dump_file, lhs, dump_flags);
          fprintf (dump_file, "' with %s '",
                   (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
          print_generic_expr (dump_file, rhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      /* Walk over every use of LHS and try to replace the use with RHS.
         At this point the only reason why such a propagation would not
         be successful would be if the use occurs in an ASM_EXPR.  */
      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
        {
          /* It's not always safe to propagate into an ASM_EXPR.  */
          if (gimple_code (use_stmt) == GIMPLE_ASM
              && ! may_propagate_copy_into_asm (lhs))
            {
              all = false;
              continue;
            }

          /* Dump details.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Original statement:");
              print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
            }

          push_stmt_changes (&use_stmt);

          /* Propagate the RHS into this use of the LHS.  */
          FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
            propagate_value (use_p, rhs);

          /* Special cases to avoid useless calls into the folding
             routines, operand scanning, etc.

             First, propagation into a PHI may cause the PHI to become
             a degenerate, so mark the PHI as interesting.  No other
             actions are necessary.

             Second, if we're propagating a virtual operand and the
             propagation does not change the underlying _DECL node for
             the virtual operand, then no further actions are necessary.  */
          if (gimple_code (use_stmt) == GIMPLE_PHI
              || (! is_gimple_reg (lhs)
                  && TREE_CODE (rhs) == SSA_NAME
                  && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
            {
              /* Dump details.  */
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "    Updated statement:");
                  print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
                }

              /* Propagation into a PHI may expose new degenerate PHIs,
                 so mark the result of the PHI as interesting.  */
              if (gimple_code (use_stmt) == GIMPLE_PHI)
                {
                  tree result = get_lhs_or_phi_result (use_stmt);
                  bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
                }

              discard_stmt_changes (&use_stmt);
              continue;
            }

          /* From this point onward we are propagating into a
             real statement.  Folding may (or may not) be possible,
             we may expose new operands, expose dead EH edges,
             etc.  */
          /* NOTE tuples.  In the tuples world, fold_stmt_inplace
             cannot fold a call that simplifies to a constant,
             because the GIMPLE_CALL must be replaced by a
             GIMPLE_ASSIGN, and there is no way to effect such a
             transformation in-place.  We might want to consider
             using the more general fold_stmt here.  */
          fold_stmt_inplace (use_stmt);

          /* Sometimes propagation can expose new operands to the
             renamer.  Note this will call update_stmt at the
             appropriate time.  */
          pop_stmt_changes (&use_stmt);

          /* Dump details.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Updated statement:");
              print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
            }

          /* If we replaced a variable index with a constant, then
             we would need to update the invariant flag for ADDR_EXPRs.  */
          if (gimple_assign_single_p (use_stmt)
              && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
            recompute_tree_invariant_for_addr_expr
              (gimple_assign_rhs1 (use_stmt));

          /* If we cleaned up EH information from the statement,
             mark its containing block as needing EH cleanups.  */
          if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
            {
              bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Flagged to clear EH edges.\n");
            }

          /* Propagation may expose new trivial copy/constant propagation
             opportunities.  */
          if (gimple_assign_single_p (use_stmt)
              && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
              && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
                  || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
            {
              tree result = get_lhs_or_phi_result (use_stmt);
              bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
            }

          /* Propagation into these nodes may make certain edges in
             the CFG unexecutable.  We want to identify them, as PHI
             nodes at the destination of those unexecutable edges may
             become degenerate.  */
          else if (gimple_code (use_stmt) == GIMPLE_COND
                   || gimple_code (use_stmt) == GIMPLE_SWITCH
                   || gimple_code (use_stmt) == GIMPLE_GOTO)
            {
              tree val;

              if (gimple_code (use_stmt) == GIMPLE_COND)
                val = fold_binary (gimple_cond_code (use_stmt),
                                   boolean_type_node,
                                   gimple_cond_lhs (use_stmt),
                                   gimple_cond_rhs (use_stmt));
              else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
                val = gimple_switch_index (use_stmt);
              else
                val = gimple_goto_dest (use_stmt);

              if (val && is_gimple_min_invariant (val))
                {
                  basic_block bb = gimple_bb (use_stmt);
                  edge te = find_taken_edge (bb, val);
                  edge_iterator ei;
                  edge e;
                  gimple_stmt_iterator gsi, psi;

                  /* Remove all outgoing edges except TE.  */
                  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
                    {
                      if (e != te)
                        {
                          /* Mark all the PHI nodes at the destination of
                             the unexecutable edge as interesting.  */
                          for (psi = gsi_start_phis (e->dest);
                               !gsi_end_p (psi);
                               gsi_next (&psi))
                            {
                              gimple phi = gsi_stmt (psi);

                              tree result = gimple_phi_result (phi);
                              int version = SSA_NAME_VERSION (result);

                              bitmap_set_bit (interesting_names, version);
                            }

                          te->probability += e->probability;

                          te->count += e->count;
                          remove_edge (e);
                          cfg_altered = true;
                        }
                      else
                        ei_next (&ei);
                    }

                  gsi = gsi_last_bb (gimple_bb (use_stmt));
                  gsi_remove (&gsi, true);

                  /* And fixup the flags on the single remaining edge.  */
                  te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
                  te->flags &= ~EDGE_ABNORMAL;
                  te->flags |= EDGE_FALLTHRU;
                  if (te->probability > REG_BR_PROB_BASE)
                    te->probability = REG_BR_PROB_BASE;
                }
            }
        }

      /* Ensure there is nothing else to do.  */
      gcc_assert (!all || has_zero_uses (lhs));

      /* If we were able to propagate away all uses of LHS, then
         we can remove STMT.  */
      if (all)
        remove_stmt_or_phi (stmt);
    }
}
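
/* An illustrative example: propagating the constant 0 for x_1 into

     if (x_1 != 0)

   folds the condition to false.  The nonexecutable true edge is
   removed, PHIs at its destination are queued as potentially
   degenerate, and the now-trivial GIMPLE_COND is deleted, leaving a
   single fallthru edge.  */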
/* STMT is either a PHI node (potentially a degenerate PHI node) or
   a statement that is a trivial copy or constant initialization.

   Attempt to eliminate STMT by propagating its RHS into all uses of
   its LHS.  This may in turn set new bits in INTERESTING_NAMES
   for nodes we want to revisit later.

   All exit paths should clear INTERESTING_NAMES for the result
   of STMT.  */

static void
eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
{
  tree lhs = get_lhs_or_phi_result (stmt);
  tree rhs;
  int version = SSA_NAME_VERSION (lhs);

  /* If the LHS of this statement or PHI has no uses, then we can
     just eliminate it.  This can occur if, for example, the PHI
     was created by block duplication due to threading and its only
     use was in the conditional at the end of the block which was
     deleted.  */
  if (has_zero_uses (lhs))
    {
      bitmap_clear_bit (interesting_names, version);
      remove_stmt_or_phi (stmt);
      return;
    }

  /* Get the RHS of the assignment or PHI node if the PHI is a
     degenerate.  */
  rhs = get_rhs_or_phi_arg (stmt);
  if (!rhs)
    {
      bitmap_clear_bit (interesting_names, version);
      return;
    }

  propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);

  /* Note that STMT may well have been deleted by now, so do
     not access it, instead use the saved version # to clear
     STMT's entry in the worklist.  */
  bitmap_clear_bit (interesting_names, version);
}
/* The first phase in degenerate PHI elimination.

   Eliminate the degenerate PHIs in BB, then recurse on the
   dominator children of BB.  */

static void
eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
{
  gimple_stmt_iterator gsi;
  basic_block son;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      eliminate_const_or_copy (phi, interesting_names);
    }

  /* Recurse into the dominator children of BB.  */
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    eliminate_degenerate_phis_1 (son, interesting_names);
}
/* A very simple pass to eliminate degenerate PHI nodes from the
   IL.  This is meant to be fast enough to be able to be run several
   times in the optimization pipeline.

   Certain optimizations, particularly those which duplicate blocks
   or remove edges from the CFG, can create or expose PHIs which are
   trivial copies or constant initializations.

   While we could pick up these optimizations in DOM or with the
   combination of copy-prop and CCP, those solutions are far too
   heavy-weight for our needs.

   This implementation has two phases so that we can efficiently
   eliminate the first order degenerate PHIs and second order
   degenerate PHIs.

   The first phase performs a dominator walk to identify and eliminate
   the vast majority of the degenerate PHIs.  When a degenerate PHI
   is identified and eliminated any affected statements or PHIs
   are put on a worklist.

   The second phase eliminates degenerate PHIs and trivial copies
   or constant initializations using the worklist.  This is how we
   pick up the secondary optimization opportunities with minimal
   cost.  */

static unsigned int
eliminate_degenerate_phis (void)
{
  bitmap interesting_names;
  bitmap interesting_names1;

  /* Bitmap of blocks which need EH information updated.  We can not
     update it on-the-fly as doing so invalidates the dominator tree.  */
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* INTERESTING_NAMES is effectively our worklist, indexed by
     SSA_NAME_VERSION.

     A set bit indicates that the statement or PHI node which
     defines the SSA_NAME should be (re)examined to determine if
     it has become a degenerate PHI or trivial const/copy propagation
     opportunity.

     Experiments have shown we generally get better compilation
     time behavior with bitmaps rather than sbitmaps.  */
  interesting_names = BITMAP_ALLOC (NULL);
  interesting_names1 = BITMAP_ALLOC (NULL);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* First phase.  Eliminate degenerate PHIs via a dominator
     walk of the CFG.

     Experiments have indicated that we generally get better
     compile-time behavior by visiting blocks in the first
     phase in dominator order.  Presumably this is because walking
     in dominator order leaves fewer PHIs for later examination
     by the worklist phase.  */
  eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);

  /* Second phase.  Eliminate second order degenerate PHIs as well
     as trivial copies or constant initializations identified by
     the first phase or this phase.  Basically we keep iterating
     until our set of INTERESTING_NAMEs is empty.   */
  while (!bitmap_empty_p (interesting_names))
    {
      unsigned int i;
      bitmap_iterator bi;

      /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
         changed during the loop.  Copy it to another bitmap and
         use that.  */
      bitmap_copy (interesting_names1, interesting_names);

      EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
        {
          tree name = ssa_name (i);

          /* Ignore SSA_NAMEs that have been released because
             their defining statement was deleted (unreachable).  */
          if (name)
            eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
                                     interesting_names);
        }
    }

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Propagation of const and copies may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      BITMAP_FREE (need_eh_cleanup);
    }

  BITMAP_FREE (interesting_names);
  BITMAP_FREE (interesting_names1);
  return 0;
}
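
/* An illustrative example of a second order degenerate PHI: given

     x_3 = PHI <y_2, y_2>
     z_5 = PHI <x_3, y_2>

   the first phase eliminates x_3 in favor of y_2, turning z_5 into
   PHI <y_2, y_2>; the worklist (second) phase then picks z_5 up and
   eliminates it as well.  */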
struct gimple_opt_pass pass_phi_only_cprop =
{
 {
  GIMPLE_PASS,
  "phicprop",                           /* name */
  gate_dominator,                       /* gate */
  eliminate_degenerate_phis,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_PHI_CPROP,                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg
    | TODO_dump_func
    | TODO_ggc_collect
    | TODO_verify_ssa
    | TODO_verify_stmts
    | TODO_update_ssa                   /* todo_flags_finish */
 }
};