/* SSA Dominator optimizations for trees
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "tree-ssa.h"
#include "domwalk.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-threadupdate.h"
#include "langhooks.h"
#include "params.h"

/* This file implements optimizations on the dominator tree.  */
/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */

enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_TERNARY,
  EXPR_CALL,
  EXPR_PHI
};
struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0, opnd1; } binary;
    struct { enum tree_code op; tree opnd0, opnd1, opnd2; } ternary;
    struct { gimple fn_from; bool pure; size_t nargs; tree *args; } call;
    struct { size_t nargs; tree *args; } phi;
  } ops;
};
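
/* As an illustrative example (not part of the original sources): the
   statement "a_1 = b_2 + c_3" is recorded with kind == EXPR_BINARY,
   ops.binary.op == PLUS_EXPR, ops.binary.opnd0 == b_2 and
   ops.binary.opnd1 == c_3; TYPE is the type of the LHS.  */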
/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

typedef struct cond_equivalence_s
{
  struct hashable_expr cond;
  tree value;
} cond_equivalence;
/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */
struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  */
  vec<cond_equivalence> cond_equivalences;
};
/* Stack of available expressions in AVAIL_EXPRS.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;

static vec<expr_hash_elt_t> avail_exprs_stack;
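
/* For instance (illustrative): after a block records elements E1 and E2
   the stack contents are [... NULL E1 E2]; unwinding pops E2 and E1,
   removing each from the hash table, and stops at the NULL marker.  */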
/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};

/* Hashtable helpers.  */

static bool hashable_expr_equal_p (const struct hashable_expr *,
                                   const struct hashable_expr *);
static void free_expr_hash_elt (void *);

struct expr_elt_hasher
{
  typedef expr_hash_elt value_type;
  typedef expr_hash_elt compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

inline hashval_t
expr_elt_hasher::hash (const value_type *p)
{
  return p->hash;
}
inline bool
expr_elt_hasher::equal (const value_type *p1, const compare_type *p2)
{
  gimple stmt1 = p1->stmt;
  const struct hashable_expr *expr1 = &p1->expr;
  const struct expr_hash_elt *stamp1 = p1->stamp;
  gimple stmt2 = p2->stmt;
  const struct hashable_expr *expr2 = &p2->expr;
  const struct expr_hash_elt *stamp2 = p2->stamp;

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  /* FIXME tuples:
     We add stmts to a hash table and then modify them.  To detect the case
     that we modify a stmt and then search for it, we assume that the hash
     is always modified by that change.
     We have to fully check why this doesn't happen on trunk or rewrite
     this in a more reliable (and easier to understand) way.  */
  if (((const struct expr_hash_elt *)p1)->hash
      != ((const struct expr_hash_elt *)p2)->hash)
    return false;

  /* In case of a collision, both RHSs have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    {
      /* Note that STMT1 and/or STMT2 may be NULL.  */
      return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
              == (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
    }

  return false;
}
/* Delete an expr_hash_elt and reclaim its storage.  */

inline void
expr_elt_hasher::remove (value_type *element)
{
  free_expr_hash_elt (element);
}
/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static hash_table <expr_elt_hasher> avail_exprs;
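
/* For instance (illustrative): once "a_1 = b_2 + c_3" has been entered
   into the table, a dominated statement "d_4 = b_2 + c_3" finds the same
   entry and is rewritten to "d_4 = a_1".  */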
/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static vec<tree> const_and_copies_stack;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;

/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static void htab_statistics (FILE *, hash_table <expr_elt_hasher>);
static void record_cond (cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = stmt;

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);

      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */
static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */
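
/* For instance (illustrative): because PLUS_EXPR is commutative,
   "b_2 + c_3" and "c_3 + b_2" compare equal above, so the binary case
   below must hash the operands order-insensitively, which is what
   iterative_hash_exprs_commutative provides.  */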
static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_TERNARY:
      val = iterative_hash_object (expr->ops.ternary.op, val);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        val = iterative_hash_exprs_commutative (expr->ops.ternary.opnd0,
                                                expr->ops.ternary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.ternary.opnd0, val);
          val = iterative_hash_expr (expr->ops.ternary.opnd1, val);
        }
      val = iterative_hash_expr (expr->ops.ternary.opnd2, val);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gimple fn_from;

        val = iterative_hash_object (code, val);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          val = iterative_hash_hashval_t
            ((hashval_t) gimple_call_internal_fn (fn_from), val);
        else
          val = iterative_hash_expr (gimple_call_fn (fn_from), val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          val = iterative_hash_expr (expr->ops.phi.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}
/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", tree_code_name[element->expr.ops.ternary.op]);
      print_generic_expr (stream, element->expr.ops.ternary.opnd0, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd1, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd2, 0);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;
        gimple fn_from;

        fn_from = element->expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
                 stream);
        else
          print_generic_expr (stream, gimple_call_fn (fn_from), 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = element->expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.phi.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, " ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}
/* Delete variable sized pieces of the expr_hash_elt ELEMENT.  */

static void
free_expr_hash_elt_contents (struct expr_hash_elt *element)
{
  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);
  else if (element->expr.kind == EXPR_PHI)
    free (element->expr.ops.phi.args);
}

/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);
  free_expr_hash_elt_contents (element);
  free (element);
}
/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}

/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              edge_info->cond_equivalences.release ();
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}
class dom_opt_dom_walker : public dom_walker
{
public:
  dom_opt_dom_walker (cdi_direction direction)
    : dom_walker (direction), m_dummy_cond (NULL) {}

  virtual void before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

private:
  void thread_across_edge (edge);

  gimple m_dummy_cond;
};
/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs.create (1024);
  avail_exprs_stack.create (20);
  const_and_copies_stack.create (20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  dom_opt_dom_walker (CDI_DOMINATORS).walk (cfun->cfg->x_entry_block_ptr);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.
         Don't clear bits in the bitmap, as that can break the bitmap
         iterator.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (bb == NULL)
            continue;
          while (single_succ_p (bb)
                 && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            bb = single_succ (bb);
          if (bb == EXIT_BLOCK_PTR)
            continue;
          if ((unsigned) bb->index != i)
            bitmap_set_bit (need_eh_cleanup, bb->index);
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_clear (need_eh_cleanup);
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  avail_exprs.dispose ();

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  avail_exprs_stack.release ();
  const_and_copies_stack.release ();

  /* Free the value-handle array.  */
  threadedge_finalize_values ();
  ssa_name_values.release ();

  return 0;
}
static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}

namespace {

const pass_data pass_data_dominator =
{
  GIMPLE_PASS, /* type */
  "dom", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_SSA_DOMINATOR_OPTS, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_cleanup_cfg | TODO_update_ssa
    | TODO_verify_ssa
    | TODO_verify_flow ), /* todo_flags_finish */
};

class pass_dominator : public gimple_opt_pass
{
public:
  pass_dominator (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dominator, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dominator (m_ctxt); }
  bool gate () { return gate_dominator (); }
  unsigned int execute () { return tree_ssa_dominator_optimize (); }

}; // class pass_dominator

} // anon namespace

gimple_opt_pass *
make_pass_dominator (gcc::context *ctxt)
{
  return new pass_dominator (ctxt);
}
/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */

static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}
/* Pop entries off AVAIL_EXPRS_STACK, removing each one from the global
   AVAIL_EXPRS hash table, until we hit the NULL marker that ends the
   entries for the current block.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (avail_exprs_stack.length () > 0)
    {
      expr_hash_elt_t victim = avail_exprs_stack.pop ();
      expr_hash_elt **slot;

      if (victim == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, victim);
        }

      slot = avail_exprs.find_slot_with_hash (victim, victim->hash, NO_INSERT);
      gcc_assert (slot && *slot == victim);
      avail_exprs.clear_slot (slot);
    }
}
/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (const_and_copies_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = const_and_copies_stack.pop ();

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = const_and_copies_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}
/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}
/* Record into the equivalence tables any equivalences implied by
   traversing edge E (which are cached in E->aux).

   Callers are responsible for managing the unwinding markers.  */
static void
record_temporary_equivalences (edge e)
{
  int i;
  struct edge_info *edge_info = (struct edge_info *) e->aux;

  /* If we have info associated with this edge, record it into
     our equivalence tables.  */
  if (edge_info)
    {
      cond_equivalence *eq;
      tree lhs = edge_info->lhs;
      tree rhs = edge_info->rhs;

      /* If we have a simple NAME = VALUE equivalence, record it.  */
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
        record_const_or_copy (lhs, rhs);

      /* If we have 0 = COND or 1 = COND equivalences, record them
         into our expression hash tables.  */
      for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
        record_cond (eq);
    }
}
/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

void
dom_opt_dom_walker::thread_across_edge (edge e)
{
  if (! m_dummy_cond)
    m_dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);

  /* Push a marker on both stacks so we can unwind the tables back to their
     current state.  */
  avail_exprs_stack.safe_push (NULL);
  const_and_copies_stack.safe_push (NULL_TREE);

  /* Traversing E may result in equivalences we can utilize.  */
  record_temporary_equivalences (e);

  /* With all the edge equivalences in the tables, go ahead and attempt
     to thread through E->dest.  */
  ::thread_across_edge (m_dummy_cond, e, false,
                        &const_and_copies_stack,
                        simplify_stmt_for_jump_threading);

  /* And restore the various tables to their state before
     we threaded this edge.

     XXX The code in tree-ssa-threadedge.c will restore the state of
     the const_and_copies table.  We just have to restore the expression
     table.  */
  remove_local_expressions_from_table ();
}
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */
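
/* For instance (illustrative): given "x_3 = PHI <y_7, y_7, x_3>", every
   alternative other than the result itself is y_7, so x_3 can be treated
   as equivalent to y_7.  */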
static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}
/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}
/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          cond_equivalence *eq;

          if (lhs)
            record_equality (lhs, rhs);

          /* If LHS is an SSA_NAME and RHS is a constant integer and LHS was
             set via a widening type conversion, then we may be able to record
             additional equivalences.  */
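
          /* For instance (illustrative): if LHS was set by "x_2 = (int) c_4"
             with c_4 of type char, and this edge implies x_2 == 7, then
             c_4 == (char) 7 holds as well, since 7 fits in char.  */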
          if (lhs
              && TREE_CODE (lhs) == SSA_NAME
              && is_gimple_constant (rhs)
              && TREE_CODE (rhs) == INTEGER_CST)
            {
              gimple defstmt = SSA_NAME_DEF_STMT (lhs);

              if (defstmt
                  && is_gimple_assign (defstmt)
                  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (defstmt)))
                {
                  tree old_rhs = gimple_assign_rhs1 (defstmt);

                  /* If the conversion widens the original value and
                     the constant is in the range of the type of OLD_RHS,
                     then convert the constant and record the equivalence.

                     Note that int_fits_type_p does not check the precision
                     if the upper and lower bounds are OK.  */
                  if (INTEGRAL_TYPE_P (TREE_TYPE (old_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (old_rhs)))
                      && int_fits_type_p (rhs, TREE_TYPE (old_rhs)))
                    {
                      tree newval = fold_convert (TREE_TYPE (old_rhs), rhs);
                      record_equality (old_rhs, newval);
                    }
                }
            }

          for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
            record_cond (eq);
        }
    }
}
/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements: %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, "    avail_exprs: ");
  htab_statistics (file, avail_exprs);
}

/* Dump SSA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}

/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, hash_table <expr_elt_hasher> htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab.size (),
           (long) htab.elements (),
           htab.collisions ());
}
/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  expr_hash_elt **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = avail_exprs.find_slot_with_hash (element, element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      avail_exprs_stack.safe_push (element);
    }
  else
    free_expr_hash_elt (element);
}
/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to **P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = boolean_true_node;
  p->safe_push (c);
}
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    default:
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  edge_info->cond_equivalences.safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  edge_info->cond_equivalences.safe_push (c);
}
/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  set_ssa_name_value (x, y);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  const_and_copies_stack.reserve (2);
  const_and_copies_stack.quick_push (prev_x);
  const_and_copies_stack.quick_push (x);
}
/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

static int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return bb_loop_depth (defbb);
}
/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
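
  /* For instance (illustrative): for the edge equivalence x_1 == 5 we
     want to record x_1 -> 5 rather than 5 -> x_1, so the swapping below
     canonicalizes the invariant onto the Y side.  */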
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

bool
simple_iv_increment_p (gimple stmt)
{
  enum tree_code code;
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  code = gimple_assign_rhs_code (stmt);
  if (code != PLUS_EXPR
      && code != MINUS_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  preinc = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */
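
/* For instance (illustrative): if x_1 is known to be 5 on exit from BB,
   then the x_1 argument in a successor's "y_2 = PHI <x_1 (BB), ...>" can
   be replaced with 5 in that edge's slot.  */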
static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      /* We may have an equivalence associated with this edge.  While
         we cannot propagate it into non-dominated blocks, we can
         propagate it into PHIs in non-dominated blocks.  */

      /* Push the unwind marker so we can reset the const and copies
         table back to its original state after processing this edge.  */
      const_and_copies_stack.safe_push (NULL_TREE);

      /* Extract and record any simple NAME = VALUE equivalences.

         Don't bother with [01] = COND equivalences, they're not useful
         here.  */
      struct edge_info *edge_info = (struct edge_info *) e->aux;
      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;

          if (lhs && TREE_CODE (lhs) == SSA_NAME)
            record_const_or_copy (lhs, rhs);
        }

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }

      restore_vars_to_original_value ();
    }
}
/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert_loc (loc, TREE_TYPE (index),
                                                 CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
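
          /* For instance (illustrative): for "if (flag_5 == 0)" we record
             flag_5 == false on the true edge and flag_5 == true on the
             false edge.  */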
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              bool can_infer_simple_equiv
                = !(HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
                    && real_zerop (op0));
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (can_infer_simple_equiv && code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }
          else if (TREE_CODE (op0) == SSA_NAME
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              bool can_infer_simple_equiv
                = !(HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op1)))
                    && (TREE_CODE (op1) == SSA_NAME || real_zerop (op1)));
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (can_infer_simple_equiv && code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }
        }

      /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
    }
}
void
dom_opt_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  avail_exprs_stack.safe_push (NULL);
  const_and_copies_stack.safe_push (NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  /* Create equivalences from redundant PHIs.  PHIs are only truly
     redundant when they exist in the same block, so push another
     marker and unwind right afterwards.  */
  avail_exprs_stack.safe_push (NULL);
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    eliminate_redundant_computations (&gsi);
  remove_local_expressions_from_table ();

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

void
dom_opt_dom_walker::after_dom_children (basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      thread_across_edge (single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        thread_across_edge (true_edge);

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        thread_across_edge (false_edge);
    }

  /* These remove expressions local to BB from the tables.  */
  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();
}
1984 /* Search for redundant computations in STMT. If any are found, then
1985 replace them with the variable holding the result of the computation.
1987 If safe, record this expression into the available expression hash
1988 table. */
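/* For example (hypothetical GIMPLE), if
     a_1 = b_2 + c_3;
   dominates
     d_4 = b_2 + c_3;
   the second computation is redundant and is rewritten to
     d_4 = a_1;  */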
1990 static void
1991 eliminate_redundant_computations (gimple_stmt_iterator* gsi)
1993 tree expr_type;
1994 tree cached_lhs;
1995 tree def;
1996 bool insert = true;
1997 bool assigns_var_p = false;
1999 gimple stmt = gsi_stmt (*gsi);
2001 if (gimple_code (stmt) == GIMPLE_PHI)
2002 def = gimple_phi_result (stmt);
2003 else
2004 def = gimple_get_lhs (stmt);
2006 /* Certain expressions on the RHS can be optimized away, but cannot
2007 themselves be entered into the hash tables. */
2008 if (! def
2009 || TREE_CODE (def) != SSA_NAME
2010 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
2011 || gimple_vdef (stmt)
2012 /* Do not record equivalences for increments of ivs. This would create
2013 overlapping live ranges for a very questionable gain. */
2014 || simple_iv_increment_p (stmt))
2015 insert = false;
2017 /* Check if the expression has been computed before. */
2018 cached_lhs = lookup_avail_expr (stmt, insert);
2020 opt_stats.num_exprs_considered++;
2022 /* Get the type of the expression we are trying to optimize. */
2023 if (is_gimple_assign (stmt))
2025 expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
2026 assigns_var_p = true;
2028 else if (gimple_code (stmt) == GIMPLE_COND)
2029 expr_type = boolean_type_node;
2030 else if (is_gimple_call (stmt))
2032 gcc_assert (gimple_call_lhs (stmt));
2033 expr_type = TREE_TYPE (gimple_call_lhs (stmt));
2034 assigns_var_p = true;
2036 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2037 expr_type = TREE_TYPE (gimple_switch_index (stmt));
2038 else if (gimple_code (stmt) == GIMPLE_PHI)
2039 /* We can't propagate into a phi, so the logic below doesn't apply.
2040 Instead record an equivalence between the cached LHS and the
2041 PHI result of this statement, provided they are in the same block.
2042 This should be sufficient to kill the redundant phi. */
2044 if (def && cached_lhs)
2045 record_const_or_copy (def, cached_lhs);
2046 return;
2048 else
2049 gcc_unreachable ();
2051 if (!cached_lhs)
2052 return;
2054 /* It is safe to ignore types here since we have already done
2055 type checking in the hashing and equality routines. In fact
2056 type checking here merely gets in the way of constant
2057 propagation. Also, make sure that it is safe to propagate
2058 CACHED_LHS into the expression in STMT. */
2059 if ((TREE_CODE (cached_lhs) != SSA_NAME
2060 && (assigns_var_p
2061 || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
2062 || may_propagate_copy_into_stmt (stmt, cached_lhs))
2064 gcc_checking_assert (TREE_CODE (cached_lhs) == SSA_NAME
2065 || is_gimple_min_invariant (cached_lhs));
2067 if (dump_file && (dump_flags & TDF_DETAILS))
2069 fprintf (dump_file, " Replaced redundant expr '");
2070 print_gimple_expr (dump_file, stmt, 0, dump_flags);
2071 fprintf (dump_file, "' with '");
2072 print_generic_expr (dump_file, cached_lhs, dump_flags);
2073 fprintf (dump_file, "'\n");
2076 opt_stats.num_re++;
2078 if (assigns_var_p
2079 && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
2080 cached_lhs = fold_convert (expr_type, cached_lhs);
2082 propagate_tree_value_into_stmt (gsi, cached_lhs);
2084 /* Since it is always necessary to mark the result as modified,
2085 perhaps we should move this into propagate_tree_value_into_stmt
2086 itself. */
2087 gimple_set_modified (gsi_stmt (*gsi), true);
2091 /* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
2092 the available expressions table or the const_and_copies table.
2093 Detect and record those equivalences. */
2094 /* We handle only very simple copy equivalences here. The heavy
2095 lifting is done by eliminate_redundant_computations. */
2097 static void
2098 record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
2100 tree lhs;
2101 enum tree_code lhs_code;
2103 gcc_assert (is_gimple_assign (stmt));
2105 lhs = gimple_assign_lhs (stmt);
2106 lhs_code = TREE_CODE (lhs);
2108 if (lhs_code == SSA_NAME
2109 && gimple_assign_single_p (stmt))
2111 tree rhs = gimple_assign_rhs1 (stmt);
2113 /* If the RHS of the assignment is a constant or another variable that
2114 may be propagated, register it in the CONST_AND_COPIES table. We
2115 do not need to record unwind data for this, since this is a true
2116 assignment and not an equivalence inferred from a comparison. All
2117 uses of this ssa name are dominated by this assignment, so unwinding
2118 just costs time and space. */
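/* E.g. (illustrative) x_1 = 42 or x_1 = y_2 registers the mapping
   x_1 -> 42 or x_1 -> y_2 via set_ssa_name_value below.  */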
2119 if (may_optimize_p
2120 && (TREE_CODE (rhs) == SSA_NAME
2121 || is_gimple_min_invariant (rhs)))
2123 if (dump_file && (dump_flags & TDF_DETAILS))
2125 fprintf (dump_file, "==== ASGN ");
2126 print_generic_expr (dump_file, lhs, 0);
2127 fprintf (dump_file, " = ");
2128 print_generic_expr (dump_file, rhs, 0);
2129 fprintf (dump_file, "\n");
2132 set_ssa_name_value (lhs, rhs);
2136 /* A memory store, even an aliased store, creates a useful
2137 equivalence. By exchanging the LHS and RHS, creating suitable
2138 vops and recording the result in the available expression table,
2139 we may be able to expose more redundant loads. */
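/* E.g. (illustrative GIMPLE) after the store
     *p_1 = x_2;
   we enter the artificial statement
     x_2 = *p_1;
   into the table so that a dominated load
     y_3 = *p_1;
   can be rewritten as y_3 = x_2.  */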
2140 if (!gimple_has_volatile_ops (stmt)
2141 && gimple_references_memory_p (stmt)
2142 && gimple_assign_single_p (stmt)
2143 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2144 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2145 && !is_gimple_reg (lhs))
2147 tree rhs = gimple_assign_rhs1 (stmt);
2148 gimple new_stmt;
2150 /* Build a new statement with the RHS and LHS exchanged. */
2151 if (TREE_CODE (rhs) == SSA_NAME)
2153 /* NOTE tuples. The call to gimple_build_assign below replaced
2154 a call to build_gimple_modify_stmt, which did not set the
2155 SSA_NAME_DEF_STMT on the LHS of the assignment. Doing so
2156 may cause an SSA validation failure, as the LHS may be a
2157 default-initialized name and should have no definition. I'm
2158 a bit dubious of this, as the artificial statement that we
2159 generate here may in fact be ill-formed, but it is simply
2160 used as an internal device in this pass, and never becomes
2161 part of the CFG. */
2162 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
2163 new_stmt = gimple_build_assign (rhs, lhs);
2164 SSA_NAME_DEF_STMT (rhs) = defstmt;
2166 else
2167 new_stmt = gimple_build_assign (rhs, lhs);
2169 gimple_set_vuse (new_stmt, gimple_vdef (stmt));
2171 /* Finally enter the statement into the available expression
2172 table. */
2173 lookup_avail_expr (new_stmt, true);
2177 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
2178 CONST_AND_COPIES. */
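/* E.g. (illustrative) if CONST_AND_COPIES maps x_1 to the constant 5,
   a use of x_1 in STMT is rewritten to use 5, subject to the safety
   checks below.  */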
2180 static void
2181 cprop_operand (gimple stmt, use_operand_p op_p)
2183 tree val;
2184 tree op = USE_FROM_PTR (op_p);
2186 /* If the operand has a known constant value or it is known to be a
2187 copy of some other variable, use the value or copy stored in
2188 CONST_AND_COPIES. */
2189 val = SSA_NAME_VALUE (op);
2190 if (val && val != op)
2192 /* Do not replace hard register operands in asm statements. */
2193 if (gimple_code (stmt) == GIMPLE_ASM
2194 && !may_propagate_copy_into_asm (op))
2195 return;
2197 /* Certain operands are not allowed to be copy propagated due
2198 to their interaction with exception handling and some GCC
2199 extensions. */
2200 if (!may_propagate_copy (op, val))
2201 return;
2203 /* Do not propagate addresses that point to volatiles into memory
2204 stmts without volatile operands. */
2205 if (POINTER_TYPE_P (TREE_TYPE (val))
2206 && TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (val)))
2207 && gimple_has_mem_ops (stmt)
2208 && !gimple_has_volatile_ops (stmt))
2209 return;
2211 /* Do not propagate copies if the propagated value is at a deeper loop
2212 depth than the propagatee. Otherwise, this may move loop variant
2213 variables outside of their loops and prevent coalescing
2214 opportunities. If the value was loop invariant, it will be hoisted
2215 by LICM and exposed for copy propagation. */
2216 if (loop_depth_of_name (val) > loop_depth_of_name (op))
2217 return;
2219 /* Do not propagate copies into simple IV increment statements.
2220 See PR23821 for how this can disturb IV analysis. */
2221 if (TREE_CODE (val) != INTEGER_CST
2222 && simple_iv_increment_p (stmt))
2223 return;
2225 /* Dump details. */
2226 if (dump_file && (dump_flags & TDF_DETAILS))
2228 fprintf (dump_file, " Replaced '");
2229 print_generic_expr (dump_file, op, dump_flags);
2230 fprintf (dump_file, "' with %s '",
2231 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2232 print_generic_expr (dump_file, val, dump_flags);
2233 fprintf (dump_file, "'\n");
2236 if (TREE_CODE (val) != SSA_NAME)
2237 opt_stats.num_const_prop++;
2238 else
2239 opt_stats.num_copy_prop++;
2241 propagate_value (op_p, val);
2243 /* And note that we modified this statement. This is now
2244 safe, even if we changed virtual operands since we will
2245 rescan the statement and rewrite its operands again. */
2246 gimple_set_modified (stmt, true);
2250 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2251 known value for that SSA_NAME (or NULL if no value is known).
2253 Propagate values from CONST_AND_COPIES into the uses, vuses and
2254 vdef_ops of STMT. */
2256 static void
2257 cprop_into_stmt (gimple stmt)
2259 use_operand_p op_p;
2260 ssa_op_iter iter;
2262 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_USE)
2263 cprop_operand (stmt, op_p);
2266 /* Optimize the statement pointed to by iterator SI.
2268 We try to perform some simplistic global redundancy elimination and
2269 constant propagation:
2271 1- To detect global redundancy, we keep track of expressions that have
2272 been computed in this block and its dominators. If we find that the
2273 same expression is computed more than once, we eliminate repeated
2274 computations by using the target of the first one.
2276 2- Constant values and copy assignments. This is used to do very
2277 simplistic constant and copy propagation. When a constant or copy
2278 assignment is found, we map the value on the RHS of the assignment to
2279 the variable in the LHS in the CONST_AND_COPIES table. */
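/* As an illustrative (hypothetical) example:
     x_1 = 4;
     y_2 = x_1 + z_3;
   becomes
     y_2 = 4 + z_3;
   using the x_1 -> 4 entry recorded in CONST_AND_COPIES.  */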
2281 static void
2282 optimize_stmt (basic_block bb, gimple_stmt_iterator si)
2284 gimple stmt, old_stmt;
2285 bool may_optimize_p;
2286 bool modified_p = false;
2288 old_stmt = stmt = gsi_stmt (si);
2290 if (dump_file && (dump_flags & TDF_DETAILS))
2292 fprintf (dump_file, "Optimizing statement ");
2293 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2296 if (gimple_code (stmt) == GIMPLE_COND)
2297 canonicalize_comparison (stmt);
2299 update_stmt_if_modified (stmt);
2300 opt_stats.num_stmts++;
2302 /* Const/copy propagate into USES, VUSES and the RHS of VDEFs. */
2303 cprop_into_stmt (stmt);
2305 /* If the statement has been modified with constant replacements,
2306 fold its RHS before checking for redundant computations. */
2307 if (gimple_modified_p (stmt))
2309 tree rhs = NULL;
2311 /* Try to fold the statement making sure that STMT is kept
2312 up to date. */
2313 if (fold_stmt (&si))
2315 stmt = gsi_stmt (si);
2316 gimple_set_modified (stmt, true);
2318 if (dump_file && (dump_flags & TDF_DETAILS))
2320 fprintf (dump_file, " Folded to: ");
2321 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2325 /* We only need to consider cases that can yield a gimple operand. */
2326 if (gimple_assign_single_p (stmt))
2327 rhs = gimple_assign_rhs1 (stmt);
2328 else if (gimple_code (stmt) == GIMPLE_GOTO)
2329 rhs = gimple_goto_dest (stmt);
2330 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2331 /* This should never be an ADDR_EXPR. */
2332 rhs = gimple_switch_index (stmt);
2334 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2335 recompute_tree_invariant_for_addr_expr (rhs);
2337 /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
2338 even if fold_stmt updated the stmt already and thus cleared
2339 gimple_modified_p flag on it. */
2340 modified_p = true;
2343 /* Check for redundant computations. Do this optimization only for
2344 side-effect-free assignments, calls with an LHS, conditionals and switches. */
2345 may_optimize_p = (!gimple_has_side_effects (stmt)
2346 && (is_gimple_assign (stmt)
2347 || (is_gimple_call (stmt)
2348 && gimple_call_lhs (stmt) != NULL_TREE)
2349 || gimple_code (stmt) == GIMPLE_COND
2350 || gimple_code (stmt) == GIMPLE_SWITCH));
2352 if (may_optimize_p)
2354 if (gimple_code (stmt) == GIMPLE_CALL)
2356 /* Resolve __builtin_constant_p. If it hasn't been
2357 folded to integer_one_node by now, it's fairly
2358 certain that the value simply isn't constant. */
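/* E.g. (hypothetical) a surviving
     t_5 = __builtin_constant_p (n_4);
   is resolved here to t_5 = 0.  */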
2359 tree callee = gimple_call_fndecl (stmt);
2360 if (callee
2361 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2362 && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
2364 propagate_tree_value_into_stmt (&si, integer_zero_node);
2365 stmt = gsi_stmt (si);
2369 update_stmt_if_modified (stmt);
2370 eliminate_redundant_computations (&si);
2371 stmt = gsi_stmt (si);
2373 /* Perform simple redundant store elimination. */
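/* E.g. (illustrative GIMPLE) if
     x_3 = *p_1;
   dominates
     *p_1 = x_3;
   the store writes back the value already in memory and can be
   removed.  */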
2374 if (gimple_assign_single_p (stmt)
2375 && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
2377 tree lhs = gimple_assign_lhs (stmt);
2378 tree rhs = gimple_assign_rhs1 (stmt);
2379 tree cached_lhs;
2380 gimple new_stmt;
2381 if (TREE_CODE (rhs) == SSA_NAME)
2383 tree tem = SSA_NAME_VALUE (rhs);
2384 if (tem)
2385 rhs = tem;
2387 /* Build a new statement with the RHS and LHS exchanged. */
2388 if (TREE_CODE (rhs) == SSA_NAME)
2390 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
2391 new_stmt = gimple_build_assign (rhs, lhs);
2392 SSA_NAME_DEF_STMT (rhs) = defstmt;
2394 else
2395 new_stmt = gimple_build_assign (rhs, lhs);
2396 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2397 cached_lhs = lookup_avail_expr (new_stmt, false);
2398 if (cached_lhs
2399 && rhs == cached_lhs)
2401 basic_block bb = gimple_bb (stmt);
2402 unlink_stmt_vdef (stmt);
2403 if (gsi_remove (&si, true))
2405 bitmap_set_bit (need_eh_cleanup, bb->index);
2406 if (dump_file && (dump_flags & TDF_DETAILS))
2407 fprintf (dump_file, " Flagged to clear EH edges.\n");
2409 release_defs (stmt);
2410 return;
2415 /* Record any additional equivalences created by this statement. */
2416 if (is_gimple_assign (stmt))
2417 record_equivalences_from_stmt (stmt, may_optimize_p);
2419 /* If STMT is a COND_EXPR and it was modified, then we may know
2420 where it goes. If that is the case, then mark the CFG as altered.
2422 This will cause us to later call remove_unreachable_blocks and
2423 cleanup_tree_cfg when it is safe to do so. It is not safe to
2424 clean things up here since removal of edges and such can trigger
2425 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2426 the manager.
2428 That's all fine and good, except that once SSA_NAMEs are released
2429 to the manager, we must not call create_ssa_name until all references
2430 to released SSA_NAMEs have been eliminated.
2432 All references to the deleted SSA_NAMEs cannot be eliminated until
2433 we remove unreachable blocks.
2435 We cannot remove unreachable blocks until after we have completed
2436 any queued jump threading.
2438 We cannot complete any queued jump threads until we have taken
2439 appropriate variables out of SSA form. Taking variables out of
2440 SSA form can call create_ssa_name and thus we lose.
2442 Ultimately I suspect we're going to need to change the interface
2443 into the SSA_NAME manager. */
2444 if (gimple_modified_p (stmt) || modified_p)
2446 tree val = NULL;
2448 update_stmt_if_modified (stmt);
2450 if (gimple_code (stmt) == GIMPLE_COND)
2451 val = fold_binary_loc (gimple_location (stmt),
2452 gimple_cond_code (stmt), boolean_type_node,
2453 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
2454 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2455 val = gimple_switch_index (stmt);
2457 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2458 cfg_altered = true;
2460 /* If we simplified a statement in such a way as to be shown that it
2461 cannot trap, update the eh information and the cfg to match. */
2462 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
2464 bitmap_set_bit (need_eh_cleanup, bb->index);
2465 if (dump_file && (dump_flags & TDF_DETAILS))
2466 fprintf (dump_file, " Flagged to clear EH edges.\n");
2471 /* Search for an existing instance of STMT in the AVAIL_EXPRS table.
2472 If found, return its LHS. Otherwise insert STMT in the table and
2473 return NULL_TREE.
2475 Also, when an expression is first inserted in the table, it is
2476 also added to AVAIL_EXPRS_STACK, so that it can be removed when
2477 we finish processing this block and its children. */
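/* E.g. (illustrative) looking up a_1 = b_2 + c_3 with INSERT set
   returns the LHS of an earlier b_2 + c_3 computation if one is
   available, or enters this one and returns NULL_TREE.  */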
2479 static tree
2480 lookup_avail_expr (gimple stmt, bool insert)
2482 expr_hash_elt **slot;
2483 tree lhs;
2484 tree temp;
2485 struct expr_hash_elt element;
2487 /* Get LHS of phi, assignment, or call; else NULL_TREE. */
2488 if (gimple_code (stmt) == GIMPLE_PHI)
2489 lhs = gimple_phi_result (stmt);
2490 else
2491 lhs = gimple_get_lhs (stmt);
2493 initialize_hash_element (stmt, lhs, &element);
2495 if (dump_file && (dump_flags & TDF_DETAILS))
2497 fprintf (dump_file, "LKUP ");
2498 print_expr_hash_elt (dump_file, &element);
2501 /* Don't bother remembering constant assignments and copy operations.
2502 Constants and copy operations are handled by the constant/copy propagator
2503 in optimize_stmt. */
2504 if (element.expr.kind == EXPR_SINGLE
2505 && (TREE_CODE (element.expr.ops.single.rhs) == SSA_NAME
2506 || is_gimple_min_invariant (element.expr.ops.single.rhs)))
2507 return NULL_TREE;
2509 /* Finally try to find the expression in the main expression hash table. */
2510 slot = avail_exprs.find_slot_with_hash (&element, element.hash,
2511 (insert ? INSERT : NO_INSERT));
2512 if (slot == NULL)
2514 free_expr_hash_elt_contents (&element);
2515 return NULL_TREE;
2517 else if (*slot == NULL)
2519 struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
2520 *element2 = element;
2521 element2->stamp = element2;
2522 *slot = element2;
2524 if (dump_file && (dump_flags & TDF_DETAILS))
2526 fprintf (dump_file, "2>>> ");
2527 print_expr_hash_elt (dump_file, element2);
2530 avail_exprs_stack.safe_push (element2);
2531 return NULL_TREE;
2533 else
2534 free_expr_hash_elt_contents (&element);
2536 /* Extract the LHS of the assignment so that it can be used as the current
2537 definition of another variable. */
2538 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2540 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2541 use the value from the const_and_copies table. */
2542 if (TREE_CODE (lhs) == SSA_NAME)
2544 temp = SSA_NAME_VALUE (lhs);
2545 if (temp)
2546 lhs = temp;
2549 if (dump_file && (dump_flags & TDF_DETAILS))
2551 fprintf (dump_file, "FIND: ");
2552 print_generic_expr (dump_file, lhs, 0);
2553 fprintf (dump_file, "\n");
2556 return lhs;
2559 /* Hashing and equality functions for AVAIL_EXPRS. We compute a value number
2560 for expressions using the code of the expression and the SSA numbers of
2561 its operands. */
2563 static hashval_t
2564 avail_expr_hash (const void *p)
2566 gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
2567 const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
2568 tree vuse;
2569 hashval_t val = 0;
2571 val = iterative_hash_hashable_expr (expr, val);
2573 /* If the hash table entry is not associated with a statement, then we
2574 can just hash the expression and not worry about virtual operands
2575 and such. */
2576 if (!stmt)
2577 return val;
2579 /* Add the SSA version number of the vuse operand. This is important
2580 because compound variables like arrays are not renamed in the
2581 operands. Rather, the rename is done on the virtual variable
2582 representing all the elements of the array. */
2583 if ((vuse = gimple_vuse (stmt)))
2584 val = iterative_hash_expr (vuse, val);
2586 return val;
2589 /* PHI-ONLY copy and constant propagation. This pass is meant to clean
2590 up degenerate PHIs created or exposed by jump threading. */
2592 /* Given PHI, return its RHS if the PHI is degenerate; otherwise
2593 return NULL. */
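/* E.g. (illustrative GIMPLE) x_2 = PHI <a_1(3), a_1(4), x_2(5)> is
   degenerate and this function returns a_1.  */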
2595 tree
2596 degenerate_phi_result (gimple phi)
2598 tree lhs = gimple_phi_result (phi);
2599 tree val = NULL;
2600 size_t i;
2602 /* Ignoring arguments which are the same as LHS, if all the remaining
2603 arguments are the same, then the PHI is a degenerate and has the
2604 value of that common argument. */
2605 for (i = 0; i < gimple_phi_num_args (phi); i++)
2607 tree arg = gimple_phi_arg_def (phi, i);
2609 if (arg == lhs)
2610 continue;
2611 else if (!arg)
2612 break;
2613 else if (!val)
2614 val = arg;
2615 else if (arg == val)
2616 continue;
2617 /* We duplicate some of operand_equal_p's checks here, not only to
2618 speed things up, but also to avoid crashing when dereferencing
2619 the type of a released SSA name. */
2620 else if (TREE_CODE (val) != TREE_CODE (arg)
2621 || TREE_CODE (val) == SSA_NAME
2622 || !operand_equal_p (arg, val, 0))
2623 break;
2625 return (i == gimple_phi_num_args (phi) ? val : NULL);
2628 /* Given a statement STMT, which is either a PHI node or an assignment,
2629 remove it from the IL. */
2631 static void
2632 remove_stmt_or_phi (gimple stmt)
2634 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2636 if (gimple_code (stmt) == GIMPLE_PHI)
2637 remove_phi_node (&gsi, true);
2638 else
2640 gsi_remove (&gsi, true);
2641 release_defs (stmt);
2645 /* Given a statement STMT, which is either a PHI node or an assignment,
2646 return the "rhs" of the node. In the case of a non-degenerate
2647 phi, NULL is returned. */
2649 static tree
2650 get_rhs_or_phi_arg (gimple stmt)
2652 if (gimple_code (stmt) == GIMPLE_PHI)
2653 return degenerate_phi_result (stmt);
2654 else if (gimple_assign_single_p (stmt))
2655 return gimple_assign_rhs1 (stmt);
2656 else
2657 gcc_unreachable ();
2661 /* Given a statement STMT, which is either a PHI node or an assignment,
2662 return the "lhs" of the node. */
2664 static tree
2665 get_lhs_or_phi_result (gimple stmt)
2667 if (gimple_code (stmt) == GIMPLE_PHI)
2668 return gimple_phi_result (stmt);
2669 else if (is_gimple_assign (stmt))
2670 return gimple_assign_lhs (stmt);
2671 else
2672 gcc_unreachable ();
2675 /* Propagate RHS into all uses of LHS (when possible).
2677 RHS and LHS are derived from STMT, which is passed in solely so
2678 that we can remove it if propagation is successful.
2680 When propagating into a PHI node or into a statement which turns
2681 into a trivial copy or constant initialization, set the
2682 appropriate bit in INTERESTING_NAMEs so that we will visit those
2683 nodes as well in an effort to pick up secondary optimization
2684 opportunities. */
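/* E.g. (hypothetical) given the copy x_1 = y_2, each use of x_1 is
   replaced by y_2; if every use is replaced, STMT itself is
   removed.  */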
2686 static void
2687 propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
2689 /* First verify that propagation is valid and isn't going to move a
2690 loop variant variable outside its loop. */
2691 if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
2692 && (TREE_CODE (rhs) != SSA_NAME
2693 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2694 && may_propagate_copy (lhs, rhs)
2695 && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
2697 use_operand_p use_p;
2698 imm_use_iterator iter;
2699 gimple use_stmt;
2700 bool all = true;
2702 /* Dump details. */
2703 if (dump_file && (dump_flags & TDF_DETAILS))
2705 fprintf (dump_file, " Replacing '");
2706 print_generic_expr (dump_file, lhs, dump_flags);
2707 fprintf (dump_file, "' with %s '",
2708 (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
2709 print_generic_expr (dump_file, rhs, dump_flags);
2710 fprintf (dump_file, "'\n");
2713 /* Walk over every use of LHS and try to replace the use with RHS.
2714 At this point the only reason why such a propagation would not
2715 be successful would be if the use occurs in an ASM_EXPR. */
2716 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2718 /* Leave debug stmts alone. If we succeed in propagating
2719 all non-debug uses, we'll drop the DEF, and propagation
2720 into debug stmts will occur then. */
2721 if (gimple_debug_bind_p (use_stmt))
2722 continue;
2724 /* It's not always safe to propagate into an ASM_EXPR. */
2725 if (gimple_code (use_stmt) == GIMPLE_ASM
2726 && ! may_propagate_copy_into_asm (lhs))
2728 all = false;
2729 continue;
2732 /* It's not ok to propagate into the definition stmt of RHS.
2733 <bb 9>:
2734 # prephitmp.12_36 = PHI <g_67.1_6(9)>
2735 g_67.1_6 = prephitmp.12_36;
2736 goto <bb 9>;
2737 While this is strictly all dead code, we do not want to
2738 deal with it here. */
2739 if (TREE_CODE (rhs) == SSA_NAME
2740 && SSA_NAME_DEF_STMT (rhs) == use_stmt)
2742 all = false;
2743 continue;
2746 /* Dump details. */
2747 if (dump_file && (dump_flags & TDF_DETAILS))
2749 fprintf (dump_file, " Original statement:");
2750 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2753 /* Propagate the RHS into this use of the LHS. */
2754 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2755 propagate_value (use_p, rhs);
2757 /* Special cases to avoid useless calls into the folding
2758 routines, operand scanning, etc.
2760 Propagation into a PHI may cause the PHI to become
2761 a degenerate, so mark the PHI as interesting. No other
2762 actions are necessary. */
2763 if (gimple_code (use_stmt) == GIMPLE_PHI)
2765 tree result;
2767 /* Dump details. */
2768 if (dump_file && (dump_flags & TDF_DETAILS))
2770 fprintf (dump_file, " Updated statement:");
2771 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2774 result = get_lhs_or_phi_result (use_stmt);
2775 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2776 continue;
2779 /* From this point onward we are propagating into a
2780 real statement. Folding may (or may not) be possible,
2781 we may expose new operands, expose dead EH edges,
2782 etc. */
2783 /* NOTE tuples. In the tuples world, fold_stmt_inplace
2784 cannot fold a call that simplifies to a constant,
2785 because the GIMPLE_CALL must be replaced by a
2786 GIMPLE_ASSIGN, and there is no way to effect such a
2787 transformation in-place. We might want to consider
2788 using the more general fold_stmt here. */
2790 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
2791 fold_stmt_inplace (&gsi);
2794 /* Sometimes propagation can expose new operands to the
2795 renamer. */
2796 update_stmt (use_stmt);
2798 /* Dump details. */
2799 if (dump_file && (dump_flags & TDF_DETAILS))
2801 fprintf (dump_file, " Updated statement:");
2802 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2805 /* If we replaced a variable index with a constant, then
2806 we would need to update the invariant flag for ADDR_EXPRs. */
2807 if (gimple_assign_single_p (use_stmt)
2808 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
2809 recompute_tree_invariant_for_addr_expr
2810 (gimple_assign_rhs1 (use_stmt));
2812 /* If we cleaned up EH information from the statement,
2813 mark its containing block as needing EH cleanups. */
2814 if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
2816 bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
2817 if (dump_file && (dump_flags & TDF_DETAILS))
2818 fprintf (dump_file, " Flagged to clear EH edges.\n");
2821 /* Propagation may expose new trivial copy/constant propagation
2822 opportunities. */
2823 if (gimple_assign_single_p (use_stmt)
2824 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
2825 && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
2826 || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
2828 tree result = get_lhs_or_phi_result (use_stmt);
2829 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2832 /* Propagation into these nodes may make certain edges in
2833 the CFG unexecutable. We want to identify them, as PHI nodes
2834 at the destination of those unexecutable edges may become
2835 degenerate. */
2836 else if (gimple_code (use_stmt) == GIMPLE_COND
2837 || gimple_code (use_stmt) == GIMPLE_SWITCH
2838 || gimple_code (use_stmt) == GIMPLE_GOTO)
2840 tree val;
2842 if (gimple_code (use_stmt) == GIMPLE_COND)
2843 val = fold_binary_loc (gimple_location (use_stmt),
2844 gimple_cond_code (use_stmt),
2845 boolean_type_node,
2846 gimple_cond_lhs (use_stmt),
2847 gimple_cond_rhs (use_stmt));
2848 else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
2849 val = gimple_switch_index (use_stmt);
2850 else
2851 val = gimple_goto_dest (use_stmt);
2853 if (val && is_gimple_min_invariant (val))
2855 basic_block bb = gimple_bb (use_stmt);
2856 edge te = find_taken_edge (bb, val);
2857 edge_iterator ei;
2858 edge e;
2859 gimple_stmt_iterator gsi, psi;
2861 /* Remove all outgoing edges except TE. */
2862 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
2864 if (e != te)
2866 /* Mark all the PHI nodes at the destination of
2867 the unexecutable edge as interesting. */
2868 for (psi = gsi_start_phis (e->dest);
2869 !gsi_end_p (psi);
2870 gsi_next (&psi))
2872 gimple phi = gsi_stmt (psi);
2874 tree result = gimple_phi_result (phi);
2875 int version = SSA_NAME_VERSION (result);
2877 bitmap_set_bit (interesting_names, version);
2880 te->probability += e->probability;
2882 te->count += e->count;
2883 remove_edge (e);
2884 cfg_altered = true;
2886 else
2887 ei_next (&ei);
2890 gsi = gsi_last_bb (gimple_bb (use_stmt));
2891 gsi_remove (&gsi, true);
2893 /* And fixup the flags on the single remaining edge. */
2894 te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
2895 te->flags &= ~EDGE_ABNORMAL;
2896 te->flags |= EDGE_FALLTHRU;
2897 if (te->probability > REG_BR_PROB_BASE)
2898 te->probability = REG_BR_PROB_BASE;
2903 /* Ensure there is nothing else to do. */
2904 gcc_assert (!all || has_zero_uses (lhs));
2906 /* If we were able to propagate away all uses of LHS, then
2907 we can remove STMT. */
2908 if (all)
2909 remove_stmt_or_phi (stmt);
2913 /* STMT is either a PHI node (potentially a degenerate PHI node) or
2914 a statement that is a trivial copy or constant initialization.
2916 Attempt to eliminate STMT by propagating its RHS into all uses of
2917 its LHS. This may in turn set new bits in INTERESTING_NAMES
2918 for nodes we want to revisit later.
2920 All exit paths should clear INTERESTING_NAMES for the result
2921 of STMT. */
2923 static void
2924 eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
2926 tree lhs = get_lhs_or_phi_result (stmt);
2927 tree rhs;
2928 int version = SSA_NAME_VERSION (lhs);
2930 /* If the LHS of this statement or PHI has no uses, then we can
2931 just eliminate it. This can occur if, for example, the PHI
2932 was created by block duplication due to threading and its only
2933 use was in the conditional at the end of the block which was
2934 deleted. */
2935 if (has_zero_uses (lhs))
2937 bitmap_clear_bit (interesting_names, version);
2938 remove_stmt_or_phi (stmt);
2939 return;
2942 /* Get the RHS of the assignment or PHI node if the PHI is a
2943 degenerate. */
2944 rhs = get_rhs_or_phi_arg (stmt);
2945 if (!rhs)
2947 bitmap_clear_bit (interesting_names, version);
2948 return;
2951 if (!virtual_operand_p (lhs))
2952 propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
2953 else
2955 gimple use_stmt;
2956 imm_use_iterator iter;
2957 use_operand_p use_p;
2958 /* For virtual operands we have to propagate into all uses as
2959 otherwise we will create overlapping live ranges. */
2960 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2961 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2962 SET_USE (use_p, rhs);
2963 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2964 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
2965 remove_stmt_or_phi (stmt);
2968 /* Note that STMT may well have been deleted by now, so do
2969 not access it; instead use the saved version number to clear
2970 its entry in the worklist. */
2971 bitmap_clear_bit (interesting_names, version);
2974 /* The first phase in degenerate PHI elimination.
2976 Eliminate the degenerate PHIs in BB, then recurse on the
2977 dominator children of BB. */
2979 static void
2980 eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
2982 gimple_stmt_iterator gsi;
2983 basic_block son;
2985 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2987 gimple phi = gsi_stmt (gsi);
2989 eliminate_const_or_copy (phi, interesting_names);
2992 /* Recurse into the dominator children of BB. */
2993 for (son = first_dom_son (CDI_DOMINATORS, bb);
2994 son;
2995 son = next_dom_son (CDI_DOMINATORS, son))
2996 eliminate_degenerate_phis_1 (son, interesting_names);
3000 /* A very simple pass to eliminate degenerate PHI nodes from the
3001 IL. This is meant to be fast enough to be run several
3002 times in the optimization pipeline.
3004 Certain optimizations, particularly those which duplicate blocks
3005 or remove edges from the CFG can create or expose PHIs which are
3006 trivial copies or constant initializations.
3008 While we could pick up these optimizations in DOM or with the
3009 combination of copy-prop and CCP, those solutions are far too
3010 heavy-weight for our needs.
3012 This implementation has two phases so that we can efficiently
3013 eliminate the first order degenerate PHIs and second order
3014 degenerate PHIs.
3016 The first phase performs a dominator walk to identify and eliminate
3017 the vast majority of the degenerate PHIs. When a degenerate PHI
3018 is identified and eliminated any affected statements or PHIs
3019 are put on a worklist.
3021 The second phase eliminates degenerate PHIs and trivial copies
3022 or constant initializations using the worklist. This is how we
3023 pick up the secondary optimization opportunities with minimal
3024 cost. */
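/* As an illustrative (hypothetical) example of a second order
   degenerate PHI:
     x_1 = PHI <a_2(3), a_2(4)>
     y_5 = PHI <x_1(6), a_2(7)>
   Replacing x_1 with a_2 in the first phase turns the second PHI
   into PHI <a_2(6), a_2(7)>, which the worklist phase removes.  */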
3026 static unsigned int
3027 eliminate_degenerate_phis (void)
3029 bitmap interesting_names;
3030 bitmap interesting_names1;
3032 /* Bitmap of blocks which need EH information updated. We cannot
3033 update it on-the-fly as doing so invalidates the dominator tree. */
3034 need_eh_cleanup = BITMAP_ALLOC (NULL);
3036 /* INTERESTING_NAMES is effectively our worklist, indexed by
3037 SSA_NAME_VERSION.
3039 A set bit indicates that the statement or PHI node which
3040 defines the SSA_NAME should be (re)examined to determine if
3041 it has become a degenerate PHI or trivial const/copy propagation
3042 opportunity.
3044 Experiments have shown we generally get better compilation
3045 time behavior with bitmaps rather than sbitmaps. */
3046 interesting_names = BITMAP_ALLOC (NULL);
3047 interesting_names1 = BITMAP_ALLOC (NULL);
3049 calculate_dominance_info (CDI_DOMINATORS);
3050 cfg_altered = false;
3052 /* First phase. Eliminate degenerate PHIs via a dominator
3053 walk of the CFG.
3055 Experiments have indicated that we generally get better
3056 compile-time behavior by visiting blocks in the first
3057 phase in dominator order. Presumably this is because walking
3058 in dominator order leaves fewer PHIs for later examination
3059 by the worklist phase. */
3060 eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
3062 /* Second phase. Eliminate second order degenerate PHIs as well
3063 as trivial copies or constant initializations identified by
3064 the first phase or this phase. Basically we keep iterating
3065 until our set of INTERESTING_NAMEs is empty. */
3066 while (!bitmap_empty_p (interesting_names))
3068 unsigned int i;
3069 bitmap_iterator bi;
3071 /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
3072 changed during the loop. Copy it to another bitmap and
3073 use that. */
3074 bitmap_copy (interesting_names1, interesting_names);
3076 EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
3078 tree name = ssa_name (i);
3080 /* Ignore SSA_NAMEs that have been released because
3081 their defining statement was deleted (unreachable). */
3082 if (name)
3083 eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
3084 interesting_names);
3088 if (cfg_altered)
3090 free_dominance_info (CDI_DOMINATORS);
3091 /* If we changed the CFG, schedule loops for fixup by cfgcleanup. */
3092 if (current_loops)
3093 loops_state_set (LOOPS_NEED_FIXUP);
3096 /* Propagation of const and copies may make some EH edges dead. Purge
3097 such edges from the CFG as needed. */
3098 if (!bitmap_empty_p (need_eh_cleanup))
3100 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
3101 BITMAP_FREE (need_eh_cleanup);
3104 BITMAP_FREE (interesting_names);
3105 BITMAP_FREE (interesting_names1);
3106 return 0;
3109 namespace {
3111 const pass_data pass_data_phi_only_cprop =
3113 GIMPLE_PASS, /* type */
3114 "phicprop", /* name */
3115 OPTGROUP_NONE, /* optinfo_flags */
3116 true, /* has_gate */
3117 true, /* has_execute */
3118 TV_TREE_PHI_CPROP, /* tv_id */
3119 ( PROP_cfg | PROP_ssa ), /* properties_required */
3120 0, /* properties_provided */
3121 0, /* properties_destroyed */
3122 0, /* todo_flags_start */
3123 ( TODO_cleanup_cfg | TODO_verify_ssa
3124 | TODO_verify_stmts
3125 | TODO_update_ssa ), /* todo_flags_finish */
3128 class pass_phi_only_cprop : public gimple_opt_pass
3130 public:
3131 pass_phi_only_cprop (gcc::context *ctxt)
3132 : gimple_opt_pass (pass_data_phi_only_cprop, ctxt)
3135 /* opt_pass methods: */
3136 opt_pass * clone () { return new pass_phi_only_cprop (m_ctxt); }
3137 bool gate () { return gate_dominator (); }
3138 unsigned int execute () { return eliminate_degenerate_phis (); }
3140 }; // class pass_phi_only_cprop
3142 } // anon namespace
3144 gimple_opt_pass *
3145 make_pass_phi_only_cprop (gcc::context *ctxt)
3147 return new pass_phi_only_cprop (ctxt);