/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"

/* This file implements optimizations on the dominator tree.  */
/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */

enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_TERNARY,
  EXPR_CALL,
  EXPR_PHI
};

struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0, opnd1; } binary;
    struct { enum tree_code op; tree opnd0, opnd1, opnd2; } ternary;
    struct { gimple fn_from; bool pure; size_t nargs; tree *args; } call;
    struct { size_t nargs; tree *args; } phi;
  } ops;
};
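
/* As an illustration of the representation above, an assignment such as
   a_1 = b_2 + c_3 would be recorded as an EXPR_BINARY with op set to
   PLUS_EXPR and opnd0/opnd1 set to b_2 and c_3.  (The SSA names here
   are purely hypothetical.)  */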

/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

typedef struct cond_equivalence_s
{
  struct hashable_expr cond;
  tree value;
} cond_equivalence;

DEF_VEC_O(cond_equivalence);
DEF_VEC_ALLOC_O(cond_equivalence,heap);

/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  */
  VEC(cond_equivalence, heap) *cond_equivalences;
};
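
/* For illustration, after a conditional such as "if (a_1 < b_2)" the
   true edge could carry the cond_equivalences a_1 < b_2 == true,
   a_1 <= b_2 == true and a_1 != b_2 == true, as built by
   record_conditions below.  (The SSA names are hypothetical and are
   shown only to clarify how the fields above are used.)  */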

/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;
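
/* For instance, given

     a_1 = b_2 + c_3;
     ...
     x_4 = b_2 + c_3;

   the second computation of b_2 + c_3 is redundant and x_4 can simply
   be replaced by a_1.  (An illustrative example; the names are made
   up.)  */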

/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;
DEF_VEC_P(expr_hash_elt_t);
DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);

static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;

/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};

/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;
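
/* Sketch of the unwinding discipline (names hypothetical): recording
   x_1 = 5 on top of a previous value pushes the previous value and
   then x_1 onto this stack; on block exit, restore_vars_to_original_value
   pops x_1 and its previous value and reinstates the latter, repeating
   until the NULL marker pushed on block entry is reached.  */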

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;

/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_leave_block (struct dom_walk_data *, basic_block);
static void dom_opt_enter_block (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);

/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = stmt;

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);

      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}

/* Compare two hashable_expr structures for equivalence.
   They are considered equivalent when the expressions
   they denote must necessarily be equal.  The logic is intended
   to follow that of operand_equal_p in fold-const.c  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}

/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */

static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_TERNARY:
      val = iterative_hash_object (expr->ops.ternary.op, val);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        val = iterative_hash_exprs_commutative (expr->ops.ternary.opnd0,
                                                expr->ops.ternary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.ternary.opnd0, val);
          val = iterative_hash_expr (expr->ops.ternary.opnd1, val);
        }
      val = iterative_hash_expr (expr->ops.ternary.opnd2, val);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gimple fn_from;

        val = iterative_hash_object (code, val);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          val = iterative_hash_hashval_t
            ((hashval_t) gimple_call_internal_fn (fn_from), val);
        else
          val = iterative_hash_expr (gimple_call_fn (fn_from), val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          val = iterative_hash_expr (expr->ops.phi.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}

/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", tree_code_name[element->expr.ops.ternary.op]);
      print_generic_expr (stream, element->expr.ops.ternary.opnd0, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd1, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd2, 0);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;
        gimple fn_from;

        fn_from = element->expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
                 stream);
        else
          print_generic_expr (stream, gimple_call_fn (fn_from), 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = element->expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.phi.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, "          ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}

/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);

  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);

  if (element->expr.kind == EXPR_PHI)
    free (element->expr.ops.phi.args);

  free (element);
}

/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}

/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              if (edge_info->cond_equivalences)
                VEC_free (cond_equivalence, heap, edge_info->cond_equivalences);
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}

/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free_expr_hash_elt);
  avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dom_opt_enter_block;
  walk_data.after_dom_children = dom_opt_leave_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (bb
              && single_succ_p (bb)
              && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            {
              bitmap_clear_bit (need_eh_cleanup, i);
              bitmap_set_bit (need_eh_cleanup, single_succ (bb)->index);
            }
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);

  /* Free the value-handle array.  */
  threadedge_finalize_values ();
  ssa_name_values = NULL;

  return 0;
}

static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}

struct gimple_opt_pass pass_dominator =
{
 {
  GIMPLE_PASS,
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg
    | TODO_update_ssa
    | TODO_verify_ssa
    | TODO_verify_flow                  /* todo_flags_finish */
 }
};

/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */
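
/* For example, a test written as "if (5 < a_1)" would be rewritten as
   "if (a_1 > 5)" so that the constant becomes the second operand.
   (A purely illustrative example; the SSA name is hypothetical.)  */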

static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}

/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Remove all the expressions in LOCALS from TABLE, stopping when there are
   LIMIT entries left in LOCALs.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
    {
      expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);
      void **slot;

      if (victim == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, victim);
        }

      slot = htab_find_slot_with_hash (avail_exprs,
                                       victim, victim->hash, NO_INSERT);
      gcc_assert (slot && *slot == (void *) victim);
      htab_clear_slot (avail_exprs, slot);
    }
}

/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = VEC_pop (tree, const_and_copies_stack);
      set_ssa_name_value (dest, prev_value);
    }
}

/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}

/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

static void
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  if (! walk_data->global_data)
    {
      gimple dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);
      walk_data->global_data = dummy_cond;
    }

  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
}

/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */
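
/* For instance, a PHI such as x_3 = PHI <y_1, x_3, y_1> has all of its
   remaining alternatives equal to y_1 once the self-referencing
   alternative is ignored, so x_3 can be treated as equivalent to y_1.
   (Names here are purely illustrative.)  */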

static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}

/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
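
/* E.g., for a loop header with one entry edge plus the latch's back
   edge, this returns the entry edge, since a back edge (one whose
   source is dominated by its destination) is skipped.  */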
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}

/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          cond_equivalence *eq;

          if (lhs)
            record_equality (lhs, rhs);

          for (i = 0; VEC_iterate (cond_equivalence,
                                   edge_info->cond_equivalences, i, eq); ++i)
            record_cond (eq);
        }
    }
}

/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements:                   %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, "    avail_exprs: ");
  htab_statistics (file, avail_exprs);
}


/* Dump SSA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}


/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}

/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
    }
  else
    free (element);
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to **P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           VEC(cond_equivalence, heap) **p)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = boolean_true_node;
  VEC_safe_push (cond_equivalence, heap, *p, &c);
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    default:
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  VEC_safe_push (cond_equivalence, heap, edge_info->cond_equivalences, &c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  VEC_safe_push (cond_equivalence, heap, edge_info->cond_equivalences, &c);
}

/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  set_ssa_name_value (x, y);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}

/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

static int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}

/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}

/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}

/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

bool
simple_iv_increment_p (gimple stmt)
{
  enum tree_code code;
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  code = gimple_assign_rhs_code (stmt);
  if (code != PLUS_EXPR
      && code != MINUS_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  preinc = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}

/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */
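
/* Illustration: if a value such as a_1 == 7 is known on exit from BB
   (i.e., SSA_NAME_VALUE (a_1) is 7), then the argument a_1 in a
   successor PHI "x_2 = PHI <a_1 (BB), ...>" can be replaced by 7.
   (Hypothetical names, shown only to illustrate the effect.)  */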

static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }
    }
}

/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert_loc (loc, TREE_TYPE (index),
                                                 CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              bool can_infer_simple_equiv
                = !(HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
                    && real_zerop (op0));
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (can_infer_simple_equiv && code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }
          else if (TREE_CODE (op0) == SSA_NAME
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              bool can_infer_simple_equiv
                = !(HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op1)))
                    && (TREE_CODE (op1) == SSA_NAME || real_zerop (op1)));
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (can_infer_simple_equiv && code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }
        }

      /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
    }
}

static void
dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                     basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  /* Create equivalences from redundant PHIs.  PHIs are only truly
     redundant when they exist in the same block, so push another
     marker and unwind right afterwards.  */
  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    eliminate_redundant_computations (&gsi);
  remove_local_expressions_from_table ();

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}

/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = (struct edge_info *) true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              cond_equivalence *eq;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              for (i = 0; VEC_iterate (cond_equivalence,
                                       edge_info->cond_equivalences, i, eq); ++i)
                record_cond (eq);
            }

          dom_thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
        }

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
          edge_info = (struct edge_info *) false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              cond_equivalence *eq;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              for (i = 0; VEC_iterate (cond_equivalence,
                                       edge_info->cond_equivalences, i, eq); ++i)
                record_cond (eq);
            }

          /* Now thread the edge.  */
          dom_thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();
}

/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.  */

static void
eliminate_redundant_computations (gimple_stmt_iterator* gsi)
{
  tree expr_type;
  tree cached_lhs;
  tree def;
  bool insert = true;
  bool assigns_var_p = false;

  gimple stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) == GIMPLE_PHI)
    def = gimple_phi_result (stmt);
  else
    def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but can not
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || gimple_vdef (stmt)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    expr_type = TREE_TYPE (gimple_switch_index (stmt));
  else if (gimple_code (stmt) == GIMPLE_PHI)
    /* We can't propagate into a phi, so the logic below doesn't apply.
       Instead record an equivalence between the cached LHS and the
       PHI result of this statement, provided they are in the same block.
       This should be sufficient to kill the redundant phi.  */
    {
      if (def && cached_lhs)
        record_const_or_copy (def, cached_lhs);
      return;
    }
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
    {
      gcc_checking_assert (TREE_CODE (cached_lhs) == SSA_NAME
                           || is_gimple_min_invariant (cached_lhs));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced redundant expr '");
          print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

      if (assigns_var_p
          && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
    }
}
/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.  */
/* We handle only very simple copy equivalences here.  The heavy
   lifting is done by eliminate_redundant_computations.  */
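/* For instance, given

     x_1 = y_2;

   we record x_1 = y_2 in CONST_AND_COPIES, and given

     x_3 = 42;

   we record x_3 = 42, so that subsequent uses of x_1 and x_3 can be
   rewritten to y_2 and 42 by cprop_operand.  */
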
static void
record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
{
  tree lhs;
  enum tree_code lhs_code;

  gcc_assert (is_gimple_assign (stmt));

  lhs = gimple_assign_lhs (stmt);
  lhs_code = TREE_CODE (lhs);

  if (lhs_code == SSA_NAME
      && gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      /* If the RHS of the assignment is a constant or another variable that
         may be propagated, register it in the CONST_AND_COPIES table.  We
         do not need to record unwind data for this, since this is a true
         assignment and not an equivalence inferred from a comparison.  All
         uses of this ssa name are dominated by this assignment, so unwinding
         just costs time and space.  */
      if (may_optimize_p
          && (TREE_CODE (rhs) == SSA_NAME
              || is_gimple_min_invariant (rhs)))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "==== ASGN ");
              print_generic_expr (dump_file, lhs, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, rhs, 0);
              fprintf (dump_file, "\n");
            }

          set_ssa_name_value (lhs, rhs);
        }
    }

  /* A memory store, even an aliased store, creates a useful
     equivalence.  By exchanging the LHS and RHS, creating suitable
     vops and recording the result in the available expression table,
     we may be able to expose more redundant loads.  */
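  /* For instance, after the store

       *p_1 = x_2;

     recording the artificial statement "x_2 = *p_1" means that a later

       y_3 = *p_1;

     can be found in the table and replaced by y_3 = x_2.  */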
  if (!gimple_has_volatile_ops (stmt)
      && gimple_references_memory_p (stmt)
      && gimple_assign_single_p (stmt)
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
      && !is_gimple_reg (lhs))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      gimple new_stmt;

      /* Build a new statement with the RHS and LHS exchanged.  */
      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* NOTE tuples.  The call to gimple_build_assign below replaced
             a call to build_gimple_modify_stmt, which did not set the
             SSA_NAME_DEF_STMT on the LHS of the assignment.  Doing so
             may cause an SSA validation failure, as the LHS may be a
             default-initialized name and should have no definition.  I'm
             a bit dubious of this, as the artificial statement that we
             generate here may in fact be ill-formed, but it is simply
             used as an internal device in this pass, and never becomes
             part of the CFG.  */
          gimple defstmt = SSA_NAME_DEF_STMT (rhs);
          new_stmt = gimple_build_assign (rhs, lhs);
          SSA_NAME_DEF_STMT (rhs) = defstmt;
        }
      else
        new_stmt = gimple_build_assign (rhs, lhs);

      gimple_set_vuse (new_stmt, gimple_vdef (stmt));

      /* Finally enter the statement into the available expression
         table.  */
      lookup_avail_expr (new_stmt, true);
    }
}

/* Replace *OP_P in STMT with any known equivalent value for *OP_P from
   CONST_AND_COPIES.  */

static void
cprop_operand (gimple stmt, use_operand_p op_p)
{
  tree val;
  tree op = USE_FROM_PTR (op_p);

  /* If the operand has a known constant value or it is known to be a
     copy of some other variable, use the value or copy stored in
     CONST_AND_COPIES.  */
  val = SSA_NAME_VALUE (op);
  if (val && val != op)
    {
      /* Do not replace hard register operands in asm statements.  */
      if (gimple_code (stmt) == GIMPLE_ASM
          && !may_propagate_copy_into_asm (op))
        return;

      /* Certain operands are not allowed to be copy propagated due
         to their interaction with exception handling and some GCC
         extensions.  */
      if (!may_propagate_copy (op, val))
        return;

      /* Do not propagate addresses that point to volatiles into memory
         stmts without volatile operands.  */
      if (POINTER_TYPE_P (TREE_TYPE (val))
          && TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (val)))
          && gimple_has_mem_ops (stmt)
          && !gimple_has_volatile_ops (stmt))
        return;

      /* Do not propagate copies if the propagated value is at a deeper loop
         depth than the propagatee.  Otherwise, this may move loop variant
         variables outside of their loops and prevent coalescing
         opportunities.  If the value was loop invariant, it will be hoisted
         by LICM and exposed for copy propagation.  */
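      /* E.g., if VAL is defined inside a loop and OP is used after
         the loop, replacing OP with VAL would extend VAL's live range
         beyond the loop and could prevent coalescing OP and VAL.  */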
      if (loop_depth_of_name (val) > loop_depth_of_name (op))
        return;

      /* Do not propagate copies into simple IV increment statements.
         See PR23821 for how this can disturb IV analysis.  */
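      /* For instance, given an i_1 = j_4 equivalence, rewriting the
         increment

           i_7 = i_1 + 1;

         as i_7 = j_4 + 1 would obscure the induction variable i.  */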
      if (TREE_CODE (val) != INTEGER_CST
          && simple_iv_increment_p (stmt))
        return;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, op, dump_flags);
          fprintf (dump_file, "' with %s '",
                   (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
          print_generic_expr (dump_file, val, dump_flags);
          fprintf (dump_file, "'\n");
        }

      if (TREE_CODE (val) != SSA_NAME)
        opt_stats.num_const_prop++;
      else
        opt_stats.num_copy_prop++;

      propagate_value (op_p, val);

      /* And note that we modified this statement.  This is now
         safe, even if we changed virtual operands since we will
         rescan the statement and rewrite its operands again.  */
      gimple_set_modified (stmt, true);
    }
}

/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the uses, vuses and
   vdef_ops of STMT.  */

static void
cprop_into_stmt (gimple stmt)
{
  use_operand_p op_p;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_USE)
    cprop_operand (stmt, op_p);
}

/* Optimize the statement pointed to by iterator SI.

   We try to perform some simplistic global redundancy elimination and
   constant propagation:

   1- To detect global redundancy, we keep track of expressions that have
      been computed in this block and its dominators.  If we find that the
      same expression is computed more than once, we eliminate repeated
      computations by using the target of the first one.

   2- Constant values and copy assignments.  This is used to do very
      simplistic constant and copy propagation.  When a constant or copy
      assignment is found, we map the value on the RHS of the assignment to
      the variable in the LHS in the CONST_AND_COPIES table.  */
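/* For instance, in

     a_1 = b_2 + c_3;
     ...
     d_4 = b_2 + c_3;

   the second computation is redundant, and d_4 can be replaced by a_1,
   provided the first statement dominates the second.  */
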
static void
optimize_stmt (basic_block bb, gimple_stmt_iterator si)
{
  gimple stmt, old_stmt;
  bool may_optimize_p;
  bool modified_p = false;

  old_stmt = stmt = gsi_stmt (si);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Optimizing statement ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  if (gimple_code (stmt) == GIMPLE_COND)
    canonicalize_comparison (stmt);

  update_stmt_if_modified (stmt);
  opt_stats.num_stmts++;

  /* Const/copy propagate into USES, VUSES and the RHS of VDEFs.  */
  cprop_into_stmt (stmt);

  /* If the statement has been modified with constant replacements,
     fold its RHS before checking for redundant computations.  */
  if (gimple_modified_p (stmt))
    {
      tree rhs = NULL;

      /* Try to fold the statement making sure that STMT is kept
         up to date.  */
      if (fold_stmt (&si))
        {
          stmt = gsi_stmt (si);
          gimple_set_modified (stmt, true);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Folded to: ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
            }
        }

      /* We only need to consider cases that can yield a gimple operand.  */
      if (gimple_assign_single_p (stmt))
        rhs = gimple_assign_rhs1 (stmt);
      else if (gimple_code (stmt) == GIMPLE_GOTO)
        rhs = gimple_goto_dest (stmt);
      else if (gimple_code (stmt) == GIMPLE_SWITCH)
        /* This should never be an ADDR_EXPR.  */
        rhs = gimple_switch_index (stmt);

      if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
        recompute_tree_invariant_for_addr_expr (rhs);

      /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
         even if fold_stmt updated the stmt already and thus cleared
         gimple_modified_p flag on it.  */
      modified_p = true;
    }

  /* Check for redundant computations.  Do this optimization only for
     statements with no side effects: assignments, calls with an LHS,
     conditionals and switches.  */
  may_optimize_p = (!gimple_has_side_effects (stmt)
                    && (is_gimple_assign (stmt)
                        || (is_gimple_call (stmt)
                            && gimple_call_lhs (stmt) != NULL_TREE)
                        || gimple_code (stmt) == GIMPLE_COND
                        || gimple_code (stmt) == GIMPLE_SWITCH));

  if (may_optimize_p)
    {
      if (gimple_code (stmt) == GIMPLE_CALL)
        {
          /* Resolve __builtin_constant_p.  If it hasn't been
             folded to integer_one_node by now, it's fairly
             certain that the value simply isn't constant.  */
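          /* E.g., a surviving __builtin_constant_p (n_5) with
             non-constant n_5 is resolved to zero here.  */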
          tree callee = gimple_call_fndecl (stmt);
          if (callee
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
            {
              propagate_tree_value_into_stmt (&si, integer_zero_node);
              stmt = gsi_stmt (si);
            }
        }

      update_stmt_if_modified (stmt);
      eliminate_redundant_computations (&si);
      stmt = gsi_stmt (si);

      /* Perform simple redundant store elimination.  */
      if (gimple_assign_single_p (stmt)
          && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree rhs = gimple_assign_rhs1 (stmt);
          tree cached_lhs;
          gimple new_stmt;
          if (TREE_CODE (rhs) == SSA_NAME)
            {
              tree tem = SSA_NAME_VALUE (rhs);
              if (tem)
                rhs = tem;
            }
          /* Build a new statement with the RHS and LHS exchanged.  */
          if (TREE_CODE (rhs) == SSA_NAME)
            {
              gimple defstmt = SSA_NAME_DEF_STMT (rhs);
              new_stmt = gimple_build_assign (rhs, lhs);
              SSA_NAME_DEF_STMT (rhs) = defstmt;
            }
          else
            new_stmt = gimple_build_assign (rhs, lhs);
          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
          cached_lhs = lookup_avail_expr (new_stmt, false);
          if (cached_lhs
              && rhs == cached_lhs)
            {
              basic_block bb = gimple_bb (stmt);
              int lp_nr = lookup_stmt_eh_lp (stmt);
              unlink_stmt_vdef (stmt);
              gsi_remove (&si, true);
              if (lp_nr != 0)
                {
                  bitmap_set_bit (need_eh_cleanup, bb->index);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "  Flagged to clear EH edges.\n");
                }
              return;
            }
        }
    }

  /* Record any additional equivalences created by this statement.  */
  if (is_gimple_assign (stmt))
    record_equivalences_from_stmt (stmt, may_optimize_p);

  /* If STMT is a COND_EXPR and it was modified, then we may know
     where it goes.  If that is the case, then mark the CFG as altered.

     This will cause us to later call remove_unreachable_blocks and
     cleanup_tree_cfg when it is safe to do so.  It is not safe to
     clean things up here since removal of edges and such can trigger
     the removal of PHI nodes, which in turn can release SSA_NAMEs to
     the manager.

     That's all fine and good, except that once SSA_NAMEs are released
     to the manager, we must not call create_ssa_name until all references
     to released SSA_NAMEs have been eliminated.

     All references to the deleted SSA_NAMEs cannot be eliminated until
     we remove unreachable blocks.

     We cannot remove unreachable blocks until after we have completed
     any queued jump threading.

     We cannot complete any queued jump threads until we have taken
     appropriate variables out of SSA form.  Taking variables out of
     SSA form can call create_ssa_name and thus we lose.

     Ultimately I suspect we're going to need to change the interface
     into the SSA_NAME manager.  */
  if (gimple_modified_p (stmt) || modified_p)
    {
      tree val = NULL;

      update_stmt_if_modified (stmt);

      if (gimple_code (stmt) == GIMPLE_COND)
        val = fold_binary_loc (gimple_location (stmt),
                               gimple_cond_code (stmt), boolean_type_node,
                               gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
      else if (gimple_code (stmt) == GIMPLE_SWITCH)
        val = gimple_switch_index (stmt);

      if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
        cfg_altered = true;

      /* If we simplified a statement in such a way as to be shown that it
         cannot trap, update the eh information and the cfg to match.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
        {
          bitmap_set_bit (need_eh_cleanup, bb->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Flagged to clear EH edges.\n");
        }
    }
}

/* Search for an existing instance of STMT in the AVAIL_EXPRS table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is
   also added to AVAIL_EXPRS_STACK, so that it can be removed when
   we finish processing this block and its children.  */

static tree
lookup_avail_expr (gimple stmt, bool insert)
{
  void **slot;
  tree lhs;
  tree temp;
  struct expr_hash_elt element;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  initialize_hash_element (stmt, lhs, &element);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      print_expr_hash_elt (dump_file, &element);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element.expr.kind == EXPR_SINGLE
      && (TREE_CODE (element.expr.ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element.expr.ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = htab_find_slot_with_hash (avail_exprs, &element, element.hash,
                                   (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    return NULL_TREE;

  if (*slot == NULL)
    {
      struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
      *element2 = element;
      element2->stamp = element2;
      *slot = (void *) element2;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "2>>> ");
          print_expr_hash_elt (dump_file, element2);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element2);
      return NULL_TREE;
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = ((struct expr_hash_elt *)*slot)->lhs;

  /* See if the LHS appears in the CONST_AND_COPIES table.  If it does, then
     use the value from the const_and_copies table.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      temp = SSA_NAME_VALUE (lhs);
      if (temp)
        lhs = temp;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, "\n");
    }

  return lhs;
}

/* Hashing and equality functions for AVAIL_EXPRS.  We compute a value number
   for expressions using the code of the expression and the SSA numbers of
   its operands.  */

static hashval_t
avail_expr_hash (const void *p)
{
  gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
  const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
  tree vuse;
  hashval_t val = 0;

  val = iterative_hash_hashable_expr (expr, val);

  /* If the hash table entry is not associated with a statement, then we
     can just hash the expression and not worry about virtual operands
     and such.  */
  if (!stmt)
    return val;

  /* Add the SSA version numbers of the vuse operand.  This is important
     because compound variables like arrays are not renamed in the
     operands.  Rather, the rename is done on the virtual variable
     representing all the elements of the array.  */
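  /* E.g., two loads of a[i_1] separated by a store to a[j_2] have
     different vuses, hash differently, and thus are not treated as
     redundant with one another.  */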
  if ((vuse = gimple_vuse (stmt)))
    val = iterative_hash_expr (vuse, val);

  return val;
}

static hashval_t
real_avail_expr_hash (const void *p)
{
  return ((const struct expr_hash_elt *)p)->hash;
}

static int
avail_expr_eq (const void *p1, const void *p2)
{
  gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
  const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
  const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
  gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
  const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
  const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;
  /* FIXME tuples:
     We add stmts to a hash table and then modify them.  To detect the
     case that we modify a stmt and then search for it, we assume that
     the hash is always modified by that change.
     We have to fully check why this doesn't happen on trunk or rewrite
     this in a more reliable (and easier to understand) way.  */
  if (((const struct expr_hash_elt *)p1)->hash
      != ((const struct expr_hash_elt *)p2)->hash)
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    {
      /* Note that STMT1 and/or STMT2 may be NULL.  */
      return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
              == (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
    }

  return false;
}

/* PHI-ONLY copy and constant propagation.  This pass is meant to clean
   up degenerate PHIs created by or exposed by jump threading.  */

/* Given PHI, return its RHS if the PHI is a degenerate; otherwise
   return NULL.  */
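/* For instance,

     x_5 = PHI <x_3(4), x_3(5)>;

   is a degenerate with value x_3, as is

     x_6 = PHI <x_3(7), x_6(8)>;

   since arguments identical to the PHI result itself are ignored.  */
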
tree
degenerate_phi_result (gimple phi)
{
  tree lhs = gimple_phi_result (phi);
  tree val = NULL;
  size_t i;

  /* Ignoring arguments which are the same as LHS, if all the remaining
     arguments are the same, then the PHI is a degenerate and has the
     value of that common argument.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (arg == lhs)
        continue;
      else if (!arg)
        break;
      else if (!val)
        val = arg;
      else if (arg == val)
        continue;
      /* We bring in some of operand_equal_p not only to speed things
         up, but also to avoid crashing when dereferencing the type of
         a released SSA name.  */
      else if (TREE_CODE (val) != TREE_CODE (arg)
               || TREE_CODE (val) == SSA_NAME
               || !operand_equal_p (arg, val, 0))
        break;
    }
  return (i == gimple_phi_num_args (phi) ? val : NULL);
}

/* Given a statement STMT, which is either a PHI node or an assignment,
   remove it from the IL.  */

static void
remove_stmt_or_phi (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);

  if (gimple_code (stmt) == GIMPLE_PHI)
    remove_phi_node (&gsi, true);
  else
    {
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}

/* Given a statement STMT, which is either a PHI node or an assignment,
   return the "rhs" of the node.  In the case of a non-degenerate
   PHI, NULL is returned.  */

static tree
get_rhs_or_phi_arg (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_PHI)
    return degenerate_phi_result (stmt);
  else if (gimple_assign_single_p (stmt))
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Given a statement STMT, which is either a PHI node or an assignment,
   return the "lhs" of the node.  */

static tree
get_lhs_or_phi_result (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_PHI)
    return gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    return gimple_assign_lhs (stmt);
  else
    gcc_unreachable ();
}

/* Propagate RHS into all uses of LHS (when possible).

   RHS and LHS are derived from STMT, which is passed in solely so
   that we can remove it if propagation is successful.

   When propagating into a PHI node or into a statement which turns
   into a trivial copy or constant initialization, set the
   appropriate bit in INTERESTING_NAMEs so that we will visit those
   nodes as well in an effort to pick up secondary optimization
   opportunities.  */
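/* For instance, propagating the RHS of x_2 = 0 into

     y_4 = PHI <x_2(3), x_2(5)>;

   turns the PHI into a degenerate with value 0, so y_4 is recorded
   in INTERESTING_NAMES and revisited later.  */
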
static void
propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
{
  /* First verify that propagation is valid and isn't going to move a
     loop variant variable outside its loop.  */
  if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
      && (TREE_CODE (rhs) != SSA_NAME
          || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
      && may_propagate_copy (lhs, rhs)
      && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
    {
      use_operand_p use_p;
      imm_use_iterator iter;
      gimple use_stmt;
      bool all = true;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replacing '");
          print_generic_expr (dump_file, lhs, dump_flags);
          fprintf (dump_file, "' with %s '",
                   (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
          print_generic_expr (dump_file, rhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      /* Walk over every use of LHS and try to replace the use with RHS.
         At this point the only reason why such a propagation would not
         be successful would be if the use occurs in an ASM_EXPR.  */
      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
        {
          /* Leave debug stmts alone.  If we succeed in propagating
             all non-debug uses, we'll drop the DEF, and propagation
             into debug stmts will occur then.  */
          if (gimple_debug_bind_p (use_stmt))
            continue;

          /* It's not always safe to propagate into an ASM_EXPR.  */
          if (gimple_code (use_stmt) == GIMPLE_ASM
              && ! may_propagate_copy_into_asm (lhs))
            {
              all = false;
              continue;
            }

          /* It's not ok to propagate into the definition stmt of RHS.
               <bb 9>:
                 # prephitmp.12_36 = PHI <g_67.1_6(9)>
                 g_67.1_6 = prephitmp.12_36;
                 goto <bb 9>;
             While this is strictly all dead code, we do not want to
             deal with it here.  */
          if (TREE_CODE (rhs) == SSA_NAME
              && SSA_NAME_DEF_STMT (rhs) == use_stmt)
            {
              all = false;
              continue;
            }

          /* Dump details.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Original statement:");
              print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
            }

          /* Propagate the RHS into this use of the LHS.  */
          FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
            propagate_value (use_p, rhs);

          /* Special cases to avoid useless calls into the folding
             routines, operand scanning, etc.

             First, propagation into a PHI may cause the PHI to become
             a degenerate, so mark the PHI as interesting.  No other
             actions are necessary.

             Second, if we're propagating a virtual operand and the
             propagation does not change the underlying _DECL node for
             the virtual operand, then no further actions are necessary.  */
          if (gimple_code (use_stmt) == GIMPLE_PHI
              || (! is_gimple_reg (lhs)
                  && TREE_CODE (rhs) == SSA_NAME
                  && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
            {
              /* Dump details.  */
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "    Updated statement:");
                  print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
                }

              /* Propagation into a PHI may expose new degenerate PHIs,
                 so mark the result of the PHI as interesting.  */
              if (gimple_code (use_stmt) == GIMPLE_PHI)
                {
                  tree result = get_lhs_or_phi_result (use_stmt);
                  bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
                }

              continue;
            }

          /* From this point onward we are propagating into a
             real statement.  Folding may (or may not) be possible,
             we may expose new operands, expose dead EH edges,
             etc.  */
          /* NOTE tuples.  In the tuples world, fold_stmt_inplace
             cannot fold a call that simplifies to a constant,
             because the GIMPLE_CALL must be replaced by a
             GIMPLE_ASSIGN, and there is no way to effect such a
             transformation in-place.  We might want to consider
             using the more general fold_stmt here.  */
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
              fold_stmt_inplace (&gsi);
            }

          /* Sometimes propagation can expose new operands to the
             renamer.  */
          update_stmt (use_stmt);

          /* Dump details.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Updated statement:");
              print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
            }

          /* If we replaced a variable index with a constant, then
             we would need to update the invariant flag for ADDR_EXPRs.  */
          if (gimple_assign_single_p (use_stmt)
              && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
            recompute_tree_invariant_for_addr_expr
              (gimple_assign_rhs1 (use_stmt));

          /* If we cleaned up EH information from the statement,
             mark its containing block as needing EH cleanups.  */
          if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
            {
              bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "    Flagged to clear EH edges.\n");
            }

          /* Propagation may expose new trivial copy/constant propagation
             opportunities.  */
          if (gimple_assign_single_p (use_stmt)
              && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
              && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
                  || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
            {
              tree result = get_lhs_or_phi_result (use_stmt);
              bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
            }

          /* Propagation into these nodes may make certain edges in
             the CFG unexecutable.  We want to identify them, as PHI
             nodes at the destination of those unexecutable edges may
             become degenerate.  */
          else if (gimple_code (use_stmt) == GIMPLE_COND
                   || gimple_code (use_stmt) == GIMPLE_SWITCH
                   || gimple_code (use_stmt) == GIMPLE_GOTO)
            {
              tree val;

              if (gimple_code (use_stmt) == GIMPLE_COND)
                val = fold_binary_loc (gimple_location (use_stmt),
                                       gimple_cond_code (use_stmt),
                                       boolean_type_node,
                                       gimple_cond_lhs (use_stmt),
                                       gimple_cond_rhs (use_stmt));
              else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
                val = gimple_switch_index (use_stmt);
              else
                val = gimple_goto_dest (use_stmt);

              if (val && is_gimple_min_invariant (val))
                {
                  basic_block bb = gimple_bb (use_stmt);
                  edge te = find_taken_edge (bb, val);
                  edge_iterator ei;
                  edge e;
                  gimple_stmt_iterator gsi, psi;

                  /* Remove all outgoing edges except TE.  */
                  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
                    {
                      if (e != te)
                        {
                          /* Mark all the PHI nodes at the destination of
                             the unexecutable edge as interesting.  */
                          for (psi = gsi_start_phis (e->dest);
                               !gsi_end_p (psi);
                               gsi_next (&psi))
                            {
                              gimple phi = gsi_stmt (psi);

                              tree result = gimple_phi_result (phi);
                              int version = SSA_NAME_VERSION (result);

                              bitmap_set_bit (interesting_names, version);
                            }

                          te->probability += e->probability;

                          te->count += e->count;
                          remove_edge (e);
                          cfg_altered = true;
                        }
                      else
                        ei_next (&ei);
                    }

                  gsi = gsi_last_bb (gimple_bb (use_stmt));
                  gsi_remove (&gsi, true);

                  /* And fixup the flags on the single remaining edge.  */
                  te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
                  te->flags &= ~EDGE_ABNORMAL;
                  te->flags |= EDGE_FALLTHRU;
                  if (te->probability > REG_BR_PROB_BASE)
                    te->probability = REG_BR_PROB_BASE;
                }
            }
        }

      /* Ensure there is nothing else to do.  */
      gcc_assert (!all || has_zero_uses (lhs));

      /* If we were able to propagate away all uses of LHS, then
         we can remove STMT.  */
      if (all)
        remove_stmt_or_phi (stmt);
    }
}

/* STMT is either a PHI node (potentially a degenerate PHI node) or
   a statement that is a trivial copy or constant initialization.

   Attempt to eliminate STMT by propagating its RHS into all uses of
   its LHS.  This may in turn set new bits in INTERESTING_NAMES
   for nodes we want to revisit later.

   All exit paths should clear INTERESTING_NAMES for the result
   of STMT.  */

static void
eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
{
  tree lhs = get_lhs_or_phi_result (stmt);
  tree rhs;
  int version = SSA_NAME_VERSION (lhs);

  /* If the LHS of this statement or PHI has no uses, then we can
     just eliminate it.  This can occur if, for example, the PHI
     was created by block duplication due to threading and its only
     use was in the conditional at the end of the block which was
     deleted.  */
  if (has_zero_uses (lhs))
    {
      bitmap_clear_bit (interesting_names, version);
      remove_stmt_or_phi (stmt);
      return;
    }

  /* Get the RHS of the assignment or PHI node if the PHI is a
     degenerate.  */
  rhs = get_rhs_or_phi_arg (stmt);
  if (!rhs)
    {
      bitmap_clear_bit (interesting_names, version);
      return;
    }

  propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);

  /* Note that STMT may well have been deleted by now, so do
     not access it; instead use the saved version number to clear
     the LHS's entry in the worklist.  */
  bitmap_clear_bit (interesting_names, version);
}

/* The first phase in degenerate PHI elimination.

   Eliminate the degenerate PHIs in BB, then recurse on the
   dominator children of BB.  */

static void
eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
{
  gimple_stmt_iterator gsi;
  basic_block son;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      eliminate_const_or_copy (phi, interesting_names);
    }

  /* Recurse into the dominator children of BB.  */
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    eliminate_degenerate_phis_1 (son, interesting_names);
}

/* A very simple pass to eliminate degenerate PHI nodes from the
   IL.  This is meant to be fast enough to be able to be run several
   times in the optimization pipeline.

   Certain optimizations, particularly those which duplicate blocks
   or remove edges from the CFG, can create or expose PHIs which are
   trivial copies or constant initializations.

   While we could pick up these optimizations in DOM or with the
   combination of copy-prop and CCP, those solutions are far too
   heavy-weight for our needs.

   This implementation has two phases so that we can efficiently
   eliminate the first order degenerate PHIs and second order
   degenerate PHIs.

   The first phase performs a dominator walk to identify and eliminate
   the vast majority of the degenerate PHIs.  When a degenerate PHI
   is identified and eliminated, any affected statements or PHIs
   are put on a worklist.

   The second phase eliminates degenerate PHIs and trivial copies
   or constant initializations using the worklist.  This is how we
   pick up the secondary optimization opportunities with minimal
   cost.  */
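/* For instance, eliminating the degenerate

     x_2 = PHI <x_1(3), x_1(4)>;

   and replacing uses of x_2 with x_1 may turn

     y_5 = PHI <x_1(6), x_2(7)>;

   into a degenerate as well; the worklist phase picks it up.  */
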
static unsigned int
eliminate_degenerate_phis (void)
{
  bitmap interesting_names;
  bitmap interesting_names1;

  /* Bitmap of blocks which need EH information updated.  We cannot
     update it on-the-fly as doing so invalidates the dominator tree.  */
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* INTERESTING_NAMES is effectively our worklist, indexed by
     SSA_NAME_VERSION.

     A set bit indicates that the statement or PHI node which
     defines the SSA_NAME should be (re)examined to determine if
     it has become a degenerate PHI or trivial const/copy propagation
     opportunity.

     Experiments have shown we generally get better compilation
     time behavior with bitmaps rather than sbitmaps.  */
  interesting_names = BITMAP_ALLOC (NULL);
  interesting_names1 = BITMAP_ALLOC (NULL);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* First phase.  Eliminate degenerate PHIs via a dominator
     walk of the CFG.

     Experiments have indicated that we generally get better
     compile-time behavior by visiting blocks in the first
     phase in dominator order.  Presumably this is because walking
     in dominator order leaves fewer PHIs for later examination
     by the worklist phase.  */
  eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);

  /* Second phase.  Eliminate second order degenerate PHIs as well
     as trivial copies or constant initializations identified by
     the first phase or this phase.  Basically we keep iterating
     until our set of INTERESTING_NAMEs is empty.   */
  while (!bitmap_empty_p (interesting_names))
    {
      unsigned int i;
      bitmap_iterator bi;

      /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
         changed during the loop.  Copy it to another bitmap and
         use that.  */
      bitmap_copy (interesting_names1, interesting_names);

      EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
        {
          tree name = ssa_name (i);

          /* Ignore SSA_NAMEs that have been released because
             their defining statement was deleted (unreachable).  */
          if (name)
            eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
                                     interesting_names);
        }
    }

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Propagation of const and copies may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      BITMAP_FREE (need_eh_cleanup);
    }

  BITMAP_FREE (interesting_names);
  BITMAP_FREE (interesting_names1);
  return 0;
}

struct gimple_opt_pass pass_phi_only_cprop =
{
 {
  GIMPLE_PASS,
  "phicprop",                           /* name */
  gate_dominator,                       /* gate */
  eliminate_degenerate_phis,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_PHI_CPROP,                    /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg
    | TODO_ggc_collect
    | TODO_verify_ssa
    | TODO_verify_stmts
    | TODO_update_ssa                   /* todo_flags_finish */
 }
};