/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011, 2012 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"

/* This file implements optimizations on the dominator tree.  */

/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */
enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_TERNARY,
  EXPR_CALL,
  EXPR_PHI
};
struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0, opnd1; } binary;
    struct { enum tree_code op; tree opnd0, opnd1, opnd2; } ternary;
    struct { gimple fn_from; bool pure; size_t nargs; tree *args; } call;
    struct { size_t nargs; tree *args; } phi;
  } ops;
};
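
/* For example, a GIMPLE assignment "x_1 = y_2 + z_3" is described by
   kind == EXPR_BINARY, type == TREE_TYPE (x_1), ops.binary.op == PLUS_EXPR
   and operands y_2 and z_3; the LHS x_1 is kept in the enclosing
   expr_hash_elt rather than here.  */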
/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

typedef struct cond_equivalence_s
{
  struct hashable_expr cond;
  tree value;
} cond_equivalence;
/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  */
  vec<cond_equivalence> cond_equivalences;
};
/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;
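
/* For instance, given

     x_1 = a_2 + b_3;
     ...
     y_4 = a_2 + b_3;

   the second computation of a_2 + b_3 finds the entry created by the
   first one and the statement is rewritten into the copy y_4 = x_1.  */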
/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;

static vec<expr_hash_elt_t> avail_exprs_stack;
/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};
/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static vec<tree> const_and_copies_stack;
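
/* Note the push order used by record_const_or_copy_1 below: the previous
   value of a name is pushed first and the name itself second, so the
   unwinder in restore_vars_to_original_value can pop the name, then the
   value to restore for it.  */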
/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;
/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_leave_block (struct dom_walk_data *, basic_block);
static void dom_opt_enter_block (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = stmt;

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);

      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}
/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */

static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_TERNARY:
      val = iterative_hash_object (expr->ops.ternary.op, val);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        val = iterative_hash_exprs_commutative (expr->ops.ternary.opnd0,
                                                expr->ops.ternary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.ternary.opnd0, val);
          val = iterative_hash_expr (expr->ops.ternary.opnd1, val);
        }
      val = iterative_hash_expr (expr->ops.ternary.opnd2, val);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gimple fn_from;

        val = iterative_hash_object (code, val);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          val = iterative_hash_hashval_t
            ((hashval_t) gimple_call_internal_fn (fn_from), val);
        else
          val = iterative_hash_expr (gimple_call_fn (fn_from), val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          val = iterative_hash_expr (expr->ops.phi.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}
/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", tree_code_name[element->expr.ops.ternary.op]);
      print_generic_expr (stream, element->expr.ops.ternary.opnd0, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd1, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd2, 0);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;
        gimple fn_from;

        fn_from = element->expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
                 stream);
        else
          print_generic_expr (stream, gimple_call_fn (fn_from), 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = element->expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.phi.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, " ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}
/* Delete variable sized pieces of the expr_hash_elt ELEMENT.  */

static void
free_expr_hash_elt_contents (struct expr_hash_elt *element)
{
  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);
  else if (element->expr.kind == EXPR_PHI)
    free (element->expr.ops.phi.args);
}

/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);

  free_expr_hash_elt_contents (element);

  free (element);
}
/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}
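
/* Note that XCNEW returns zeroed storage, so a fresh edge_info starts out
   with NULL lhs/rhs and an empty cond_equivalences vector; callers fill in
   only the fields they need.  */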
/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              edge_info->cond_equivalences.release ();
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}
/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free_expr_hash_elt);
  avail_exprs_stack.create (20);
  const_and_copies_stack.create (20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dom_opt_enter_block;
  walk_data.after_dom_children = dom_opt_leave_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.
         Don't clear bits in the bitmap, as that can break the bitmap
         iterator.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (bb == NULL)
            continue;
          while (single_succ_p (bb)
                 && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            bb = single_succ (bb);
          if (bb == EXIT_BLOCK_PTR)
            continue;
          if ((unsigned) bb->index != i)
            bitmap_set_bit (need_eh_cleanup, bb->index);
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_clear (need_eh_cleanup);
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  avail_exprs_stack.release ();
  const_and_copies_stack.release ();

  /* Free the value-handle array.  */
  threadedge_finalize_values ();
  ssa_name_values.release ();

  return 0;
}
static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}
struct gimple_opt_pass pass_dominator =
{
 {
  GIMPLE_PASS,
  "dom",                                /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg
    | TODO_update_ssa
    | TODO_verify_ssa
    | TODO_verify_flow                  /* todo_flags_finish */
 }
};
/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */

static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}
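
/* For example, a condition written as "if (5 < a_1)" is canonicalized here
   into "if (a_1 > 5)", so that dominated statements only ever need to look
   up a single form of the comparison.  */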
/* Remove all the expressions made available in the current block from
   the AVAIL_EXPRS hash table, stopping when we hit the marker that was
   pushed when we entered the block.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (avail_exprs_stack.length () > 0)
    {
      expr_hash_elt_t victim = avail_exprs_stack.pop ();
      void **slot;

      if (victim == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, victim);
        }

      slot = htab_find_slot_with_hash (avail_exprs,
                                       victim, victim->hash, NO_INSERT);
      gcc_assert (slot && *slot == (void *) victim);
      htab_clear_slot (avail_exprs, slot);
    }
}
/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (const_and_copies_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = const_and_copies_stack.pop ();

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = const_and_copies_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}
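
/* Together, the two unwind routines above give the dominator walk its
   scoped tables: equivalences recorded while visiting a block are popped
   before a sibling in the dominator tree is entered, so facts proven on
   one path never leak onto another.  */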
/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}

/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

static void
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  if (! walk_data->global_data)
    {
      gimple dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);
      walk_data->global_data = dummy_cond;
    }

  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
}
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */

static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}
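
/* For example, the degenerate PHI

     x_3 = PHI <y_2, y_2, x_3>

   records the equivalence x_3 == y_2, since every alternative is either
   y_2 or the result itself.  */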
/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}
/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          cond_equivalence *eq;

          if (lhs)
            record_equality (lhs, rhs);

          for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
            record_cond (eq);
        }
    }
}
/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements: %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, " avail_exprs: ");
  htab_statistics (file, avail_exprs);
}

/* Dump SSA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}

/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}
/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      avail_exprs_stack.safe_push (element);
    }
  else
    free_expr_hash_elt (element);
}
/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to *P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = boolean_true_node;
  p->safe_push (c);
}
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    default:
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  edge_info->cond_equivalences.safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  edge_info->cond_equivalences.safe_push (c);
}
/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  set_ssa_name_value (x, y);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  const_and_copies_stack.reserve (2);
  const_and_copies_stack.quick_push (prev_x);
  const_and_copies_stack.quick_push (x);
}
/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

static int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return bb_loop_depth (defbb);
}
/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

bool
simple_iv_increment_p (gimple stmt)
{
  enum tree_code code;
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  code = gimple_assign_rhs_code (stmt);
  if (code != PLUS_EXPR
      && code != MINUS_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  preinc = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */

static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }
    }
}
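
/* For instance, if this block established x_1 == 7 and a successor has
   y_2 = PHI <x_1 (BB), ...>, the x_1 argument is replaced by 7 here, which
   may allow the PHI itself to be simplified later.  */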
/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert_loc (loc, TREE_TYPE (index),
                                                 CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              bool can_infer_simple_equiv
                = !(HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
                    && real_zerop (op0));
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (can_infer_simple_equiv && code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }
          else if (TREE_CODE (op0) == SSA_NAME
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              bool can_infer_simple_equiv
                = !(HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op1)))
                    && (TREE_CODE (op1) == SSA_NAME || real_zerop (op1)));
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (can_infer_simple_equiv && code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }
        }

      /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
    }
}
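
/* As an example, for "if (x_1 == 42)" the true edge gets the simple
   equivalence x_1 = 42 plus the derived conditions x_1 <= 42 and
   x_1 >= 42, while the false edge merely records that x_1 != 42 holds.  */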
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

static void
dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                     basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  avail_exprs_stack.safe_push (NULL);
  const_and_copies_stack.safe_push (NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  /* Create equivalences from redundant PHIs.  PHIs are only truly
     redundant when they exist in the same block, so push another
     marker and unwind right afterwards.  */
  avail_exprs_stack.safe_push (NULL);
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    eliminate_redundant_computations (&gsi);
  remove_local_expressions_from_table ();

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      /* Push a marker on the stack, which thread_across_edge expects
         and will remove.  */
      const_and_copies_stack.safe_push (NULL_TREE);
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          avail_exprs_stack.safe_push (NULL);
          const_and_copies_stack.safe_push (NULL_TREE);

          edge_info = (struct edge_info *) true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              cond_equivalence *eq;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
                record_cond (eq);
            }

          dom_thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
        }

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          const_and_copies_stack.safe_push (NULL_TREE);
          edge_info = (struct edge_info *) false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              cond_equivalence *eq;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
                record_cond (eq);
            }

          /* Now thread the edge.  */
          dom_thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();
}
/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.  */

static void
eliminate_redundant_computations (gimple_stmt_iterator* gsi)
{
  tree expr_type;
  tree cached_lhs;
  tree def;
  bool insert = true;
  bool assigns_var_p = false;

  gimple stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) == GIMPLE_PHI)
    def = gimple_phi_result (stmt);
  else
    def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but cannot
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || gimple_vdef (stmt)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    expr_type = TREE_TYPE (gimple_switch_index (stmt));
  else if (gimple_code (stmt) == GIMPLE_PHI)
    /* We can't propagate into a phi, so the logic below doesn't apply.
       Instead record an equivalence between the cached LHS and the
       PHI result of this statement, provided they are in the same block.
       This should be sufficient to kill the redundant phi.  */
    {
      if (def && cached_lhs)
        record_const_or_copy (def, cached_lhs);
      return;
    }
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
    {
      gcc_checking_assert (TREE_CODE (cached_lhs) == SSA_NAME
                           || is_gimple_min_invariant (cached_lhs));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced redundant expr '");
          print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

      if (assigns_var_p
          && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
    }
}
/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.  */
/* We handle only very simple copy equivalences here.  The heavy
   lifting is done by eliminate_redundant_computations.  */
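/* For example (a sketch): after the copy

     x_1 = y_2;

   y_2 is recorded as the current value of x_1 in the CONST_AND_COPIES
   table, so later uses of x_1 can be rewritten to use y_2 directly.  */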
static void
record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
{
  tree lhs;
  enum tree_code lhs_code;

  gcc_assert (is_gimple_assign (stmt));

  lhs = gimple_assign_lhs (stmt);
  lhs_code = TREE_CODE (lhs);

  if (lhs_code == SSA_NAME
      && gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      /* If the RHS of the assignment is a constant or another variable that
         may be propagated, register it in the CONST_AND_COPIES table.  We
         do not need to record unwind data for this, since this is a true
         assignment and not an equivalence inferred from a comparison.  All
         uses of this ssa name are dominated by this assignment, so unwinding
         just costs time and space.  */
      if (may_optimize_p
          && (TREE_CODE (rhs) == SSA_NAME
              || is_gimple_min_invariant (rhs)))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "==== ASGN ");
              print_generic_expr (dump_file, lhs, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, rhs, 0);
              fprintf (dump_file, "\n");
            }

          set_ssa_name_value (lhs, rhs);
        }
    }
  /* A memory store, even an aliased store, creates a useful
     equivalence.  By exchanging the LHS and RHS, creating suitable
     vops and recording the result in the available expression table,
     we may be able to expose more redundant loads.  */
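  /* A sketch of the idea: after the store

       *p_1 = x_2;

     the artificial statement x_2 = *p_1 is entered into the available
     expression table, so a later load

       y_3 = *p_1;

     can be found there and replaced by the copy y_3 = x_2.  */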
  if (!gimple_has_volatile_ops (stmt)
      && gimple_references_memory_p (stmt)
      && gimple_assign_single_p (stmt)
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
      && !is_gimple_reg (lhs))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      gimple new_stmt;

      /* Build a new statement with the RHS and LHS exchanged.  */
      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* NOTE tuples.  The call to gimple_build_assign below replaced
             a call to build_gimple_modify_stmt, which did not set the
             SSA_NAME_DEF_STMT on the LHS of the assignment.  Doing so
             may cause an SSA validation failure, as the LHS may be a
             default-initialized name and should have no definition.  I'm
             a bit dubious of this, as the artificial statement that we
             generate here may in fact be ill-formed, but it is simply
             used as an internal device in this pass, and never becomes
             part of the CFG.  */
          gimple defstmt = SSA_NAME_DEF_STMT (rhs);
          new_stmt = gimple_build_assign (rhs, lhs);
          SSA_NAME_DEF_STMT (rhs) = defstmt;
        }
      else
        new_stmt = gimple_build_assign (rhs, lhs);

      gimple_set_vuse (new_stmt, gimple_vdef (stmt));

      /* Finally enter the statement into the available expression
         table.  */
      lookup_avail_expr (new_stmt, true);
    }
}
/* Replace *OP_P in STMT with any known equivalent value for *OP_P from
   CONST_AND_COPIES.  */
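/* E.g. (a sketch): if CONST_AND_COPIES records 5 as the value of x_1,
   then

     y_2 = x_1 + 1;

   is rewritten in place to

     y_2 = 5 + 1;

   and the statement is marked modified so that it gets folded later.  */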
static void
cprop_operand (gimple stmt, use_operand_p op_p)
{
  tree val;
  tree op = USE_FROM_PTR (op_p);

  /* If the operand has a known constant value or it is known to be a
     copy of some other variable, use the value or copy stored in
     CONST_AND_COPIES.  */
  val = SSA_NAME_VALUE (op);
  if (val && val != op)
    {
      /* Do not replace hard register operands in asm statements.  */
      if (gimple_code (stmt) == GIMPLE_ASM
          && !may_propagate_copy_into_asm (op))
        return;

      /* Certain operands are not allowed to be copy propagated due
         to their interaction with exception handling and some GCC
         extensions.  */
      if (!may_propagate_copy (op, val))
        return;

      /* Do not propagate addresses that point to volatiles into memory
         stmts without volatile operands.  */
      if (POINTER_TYPE_P (TREE_TYPE (val))
          && TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (val)))
          && gimple_has_mem_ops (stmt)
          && !gimple_has_volatile_ops (stmt))
        return;

      /* Do not propagate copies if the propagated value is at a deeper loop
         depth than the propagatee.  Otherwise, this may move loop variant
         variables outside of their loops and prevent coalescing
         opportunities.  If the value was loop invariant, it will be hoisted
         by LICM and exposed for copy propagation.  */
      if (loop_depth_of_name (val) > loop_depth_of_name (op))
        return;

      /* Do not propagate copies into simple IV increment statements.
         See PR23821 for how this can disturb IV analysis.  */
      if (TREE_CODE (val) != INTEGER_CST
          && simple_iv_increment_p (stmt))
        return;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, op, dump_flags);
          fprintf (dump_file, "' with %s '",
                   (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
          print_generic_expr (dump_file, val, dump_flags);
          fprintf (dump_file, "'\n");
        }

      if (TREE_CODE (val) != SSA_NAME)
        opt_stats.num_const_prop++;
      else
        opt_stats.num_copy_prop++;

      propagate_value (op_p, val);

      /* And note that we modified this statement.  This is now
         safe, even if we changed virtual operands since we will
         rescan the statement and rewrite its operands again.  */
      gimple_set_modified (stmt, true);
    }
}
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the uses, vuses and
   vdef_ops of STMT.  */

static void
cprop_into_stmt (gimple stmt)
{
  use_operand_p op_p;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_USE)
    cprop_operand (stmt, op_p);
}
/* Optimize the statement pointed to by iterator SI.

   We try to perform some simplistic global redundancy elimination and
   constant propagation:

   1- To detect global redundancy, we keep track of expressions that have
      been computed in this block and its dominators.  If we find that the
      same expression is computed more than once, we eliminate repeated
      computations by using the target of the first one.

   2- Constant values and copy assignments.  This is used to do very
      simplistic constant and copy propagation.  When a constant or copy
      assignment is found, we map the value on the RHS of the assignment to
      the variable in the LHS in the CONST_AND_COPIES table.  */
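/* For instance (a sketch of the two transformations interacting): after

     a_1 = b_2 + c_3;
     x_4 = b_2;

   the statement y_5 = x_4 + c_3 is first rewritten by copy propagation
   to y_5 = b_2 + c_3, which redundancy elimination then turns into the
   copy y_5 = a_1.  */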
static void
optimize_stmt (basic_block bb, gimple_stmt_iterator si)
{
  gimple stmt, old_stmt;
  bool may_optimize_p;
  bool modified_p = false;

  old_stmt = stmt = gsi_stmt (si);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Optimizing statement ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  if (gimple_code (stmt) == GIMPLE_COND)
    canonicalize_comparison (stmt);

  update_stmt_if_modified (stmt);
  opt_stats.num_stmts++;

  /* Const/copy propagate into USES, VUSES and the RHS of VDEFs.  */
  cprop_into_stmt (stmt);

  /* If the statement has been modified with constant replacements,
     fold its RHS before checking for redundant computations.  */
  if (gimple_modified_p (stmt))
    {
      tree rhs = NULL;

      /* Try to fold the statement making sure that STMT is kept
         up to date.  */
      if (fold_stmt (&si))
        {
          stmt = gsi_stmt (si);
          gimple_set_modified (stmt, true);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Folded to: ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
            }
        }

      /* We only need to consider cases that can yield a gimple operand.  */
      if (gimple_assign_single_p (stmt))
        rhs = gimple_assign_rhs1 (stmt);
      else if (gimple_code (stmt) == GIMPLE_GOTO)
        rhs = gimple_goto_dest (stmt);
      else if (gimple_code (stmt) == GIMPLE_SWITCH)
        /* This should never be an ADDR_EXPR.  */
        rhs = gimple_switch_index (stmt);

      if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
        recompute_tree_invariant_for_addr_expr (rhs);

      /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
         even if fold_stmt updated the stmt already and thus cleared
         gimple_modified_p flag on it.  */
      modified_p = true;
    }
  /* Check for redundant computations.  Do this optimization only
     for assignments that have no volatile ops, and for conditionals.  */
  may_optimize_p = (!gimple_has_side_effects (stmt)
                    && (is_gimple_assign (stmt)
                        || (is_gimple_call (stmt)
                            && gimple_call_lhs (stmt) != NULL_TREE)
                        || gimple_code (stmt) == GIMPLE_COND
                        || gimple_code (stmt) == GIMPLE_SWITCH));

  if (may_optimize_p)
    {
      if (gimple_code (stmt) == GIMPLE_CALL)
        {
          /* Resolve __builtin_constant_p.  If it hasn't been
             folded to integer_one_node by now, it's fairly
             certain that the value simply isn't constant.  */
          tree callee = gimple_call_fndecl (stmt);
          if (callee
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
            {
              propagate_tree_value_into_stmt (&si, integer_zero_node);
              stmt = gsi_stmt (si);
            }
        }

      update_stmt_if_modified (stmt);
      eliminate_redundant_computations (&si);
      stmt = gsi_stmt (si);

      /* Perform simple redundant store elimination.  */
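      /* E.g. (a sketch): in

           x_1 = *p_2;
           ...
           *p_2 = x_1;

         the store writes back the value just loaded, so if no other
         store intervenes the store itself is dead and can be removed.  */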
      if (gimple_assign_single_p (stmt)
          && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree rhs = gimple_assign_rhs1 (stmt);
          tree cached_lhs;
          gimple new_stmt;
          if (TREE_CODE (rhs) == SSA_NAME)
            {
              tree tem = SSA_NAME_VALUE (rhs);
              if (tem)
                rhs = tem;
            }
          /* Build a new statement with the RHS and LHS exchanged.  */
          if (TREE_CODE (rhs) == SSA_NAME)
            {
              gimple defstmt = SSA_NAME_DEF_STMT (rhs);
              new_stmt = gimple_build_assign (rhs, lhs);
              SSA_NAME_DEF_STMT (rhs) = defstmt;
            }
          else
            new_stmt = gimple_build_assign (rhs, lhs);
          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
          cached_lhs = lookup_avail_expr (new_stmt, false);
          if (cached_lhs
              && rhs == cached_lhs)
            {
              basic_block bb = gimple_bb (stmt);
              unlink_stmt_vdef (stmt);
              if (gsi_remove (&si, true))
                {
                  bitmap_set_bit (need_eh_cleanup, bb->index);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "  Flagged to clear EH edges.\n");
                }
              release_defs (stmt);
              return;
            }
        }
    }
  /* Record any additional equivalences created by this statement.  */
  if (is_gimple_assign (stmt))
    record_equivalences_from_stmt (stmt, may_optimize_p);

  /* If STMT is a COND_EXPR and it was modified, then we may know
     where it goes.  If that is the case, then mark the CFG as altered.

     This will cause us to later call remove_unreachable_blocks and
     cleanup_tree_cfg when it is safe to do so.  It is not safe to
     clean things up here since removal of edges and such can trigger
     the removal of PHI nodes, which in turn can release SSA_NAMEs to
     the manager.

     That's all fine and good, except that once SSA_NAMEs are released
     to the manager, we must not call create_ssa_name until all references
     to released SSA_NAMEs have been eliminated.

     All references to the deleted SSA_NAMEs can not be eliminated until
     we remove unreachable blocks.

     We can not remove unreachable blocks until after we have completed
     any queued jump threading.

     We can not complete any queued jump threads until we have taken
     appropriate variables out of SSA form.  Taking variables out of
     SSA form can call create_ssa_name and thus we lose.

     Ultimately I suspect we're going to need to change the interface
     into the SSA_NAME manager.  */
  if (gimple_modified_p (stmt) || modified_p)
    {
      tree val = NULL;

      update_stmt_if_modified (stmt);

      if (gimple_code (stmt) == GIMPLE_COND)
        val = fold_binary_loc (gimple_location (stmt),
                               gimple_cond_code (stmt), boolean_type_node,
                               gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
      else if (gimple_code (stmt) == GIMPLE_SWITCH)
        val = gimple_switch_index (stmt);

      if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
        cfg_altered = true;

      /* If we simplified a statement in such a way as to be shown that it
         cannot trap, update the eh information and the cfg to match.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
        {
          bitmap_set_bit (need_eh_cleanup, bb->index);

          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Flagged to clear EH edges.\n");
        }
    }
}
/* Search for an existing instance of STMT in the AVAIL_EXPRS table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is
   added to AVAIL_EXPRS_STACK, so that it can be removed when we
   finish processing this block and its children.  */
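/* A sketch of the scoping this gives: an expression recorded while
   walking a block stays available in every block it dominates; when the
   dominator walk leaves that block, the AVAIL_EXPRS_STACK entries pushed
   here are popped and the expression becomes unavailable again.  */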
static tree
lookup_avail_expr (gimple stmt, bool insert)
{
  void **slot;
  tree lhs;
  tree temp;
  struct expr_hash_elt element;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  initialize_hash_element (stmt, lhs, &element);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      print_expr_hash_elt (dump_file, &element);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element.expr.kind == EXPR_SINGLE
      && (TREE_CODE (element.expr.ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element.expr.ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = htab_find_slot_with_hash (avail_exprs, &element, element.hash,
                                   (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      free_expr_hash_elt_contents (&element);
      return NULL_TREE;
    }
  else if (*slot == NULL)
    {
      struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
      *element2 = element;
      element2->stamp = element2;
      *slot = (void *) element2;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "2>>> ");
          print_expr_hash_elt (dump_file, element2);
        }

      avail_exprs_stack.safe_push (element2);
      return NULL_TREE;
    }
  else
    free_expr_hash_elt_contents (&element);

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = ((struct expr_hash_elt *)*slot)->lhs;

  /* See if the LHS appears in the CONST_AND_COPIES table.  If it does, then
     use the value from the const_and_copies table.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      temp = SSA_NAME_VALUE (lhs);
      if (temp)
        lhs = temp;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, "\n");
    }

  return lhs;
}
/* Hashing and equality functions for AVAIL_EXPRS.  We compute a value number
   for expressions using the code of the expression and the SSA numbers of
   its operands.  */
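/* A sketch of why the VUSE is folded into the hash below: two loads of
   *p_1 hash and compare equal only if they have the same VUSE, so an
   intervening store through p_1, which creates a new virtual definition,
   keeps the second load from matching the first.  */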
static hashval_t
avail_expr_hash (const void *p)
{
  gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
  const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
  tree vuse;
  hashval_t val = 0;

  val = iterative_hash_hashable_expr (expr, val);

  /* If the hash table entry is not associated with a statement, then we
     can just hash the expression and not worry about virtual operands
     and such.  */
  if (!stmt)
    return val;

  /* Add the SSA version numbers of the vuse operand.  This is important
     because compound variables like arrays are not renamed in the
     operands.  Rather, the rename is done on the virtual variable
     representing all the elements of the array.  */
  if ((vuse = gimple_vuse (stmt)))
    val = iterative_hash_expr (vuse, val);

  return val;
}

static hashval_t
real_avail_expr_hash (const void *p)
{
  return ((const struct expr_hash_elt *)p)->hash;
}
static int
avail_expr_eq (const void *p1, const void *p2)
{
  gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
  const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
  const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
  gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
  const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
  const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  /* FIXME tuples:
     We add stmts to a hash table and then modify them.  To detect the case
     that we modify a stmt and then search for it, we assume that the hash
     is always modified by that change.
     We have to fully check why this doesn't happen on trunk or rewrite
     this in a more reliable (and easier to understand) way.  */
  if (((const struct expr_hash_elt *)p1)->hash
      != ((const struct expr_hash_elt *)p2)->hash)
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    {
      /* Note that STMT1 and/or STMT2 may be NULL.  */
      return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
              == (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
    }

  return false;
}
/* PHI-ONLY copy and constant propagation.  This pass is meant to clean
   up degenerate PHIs created by or exposed by jump threading.  */

/* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
   NULL.  */
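/* E.g. (a sketch): the PHI

     x_1 = PHI <y_2(3), y_2(4), x_1(5)>

   is degenerate: ignoring the self-referencing argument, every remaining
   argument is y_2, so the PHI simply has the value y_2.  */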
tree
degenerate_phi_result (gimple phi)
{
  tree lhs = gimple_phi_result (phi);
  tree val = NULL;
  size_t i;

  /* Ignoring arguments which are the same as LHS, if all the remaining
     arguments are the same, then the PHI is a degenerate and has the
     value of that common argument.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (arg == lhs)
        continue;
      else if (!arg)
        break;
      else if (!val)
        val = arg;
      else if (arg == val)
        continue;
      /* We bring in some of operand_equal_p not only to speed things
         up, but also to avoid crashing when dereferencing the type of
         a released SSA name.  */
      else if (TREE_CODE (val) != TREE_CODE (arg)
               || TREE_CODE (val) == SSA_NAME
               || !operand_equal_p (arg, val, 0))
        break;
    }
  return (i == gimple_phi_num_args (phi) ? val : NULL);
}
/* Given a statement STMT, which is either a PHI node or an assignment,
   remove it from the IL.  */

static void
remove_stmt_or_phi (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);

  if (gimple_code (stmt) == GIMPLE_PHI)
    remove_phi_node (&gsi, true);
  else
    {
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}
/* Given a statement STMT, which is either a PHI node or an assignment,
   return the "rhs" of the node.  In the case of a non-degenerate
   PHI, NULL is returned.  */

static tree
get_rhs_or_phi_arg (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_PHI)
    return degenerate_phi_result (stmt);
  else if (gimple_assign_single_p (stmt))
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}


/* Given a statement STMT, which is either a PHI node or an assignment,
   return the "lhs" of the node.  */

static tree
get_lhs_or_phi_result (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_PHI)
    return gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    return gimple_assign_lhs (stmt);
  else
    gcc_unreachable ();
}
/* Propagate RHS into all uses of LHS (when possible).

   RHS and LHS are derived from STMT, which is passed in solely so
   that we can remove it if propagation is successful.

   When propagating into a PHI node or into a statement which turns
   into a trivial copy or constant initialization, set the
   appropriate bit in INTERESTING_NAMEs so that we will visit those
   nodes as well in an effort to pick up secondary optimization
   opportunities.  */
static void
propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
{
  /* First verify that propagation is valid and isn't going to move a
     loop variant variable outside its loop.  */
  if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
      && (TREE_CODE (rhs) != SSA_NAME
          || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
      && may_propagate_copy (lhs, rhs)
      && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
    {
      use_operand_p use_p;
      imm_use_iterator iter;
      gimple use_stmt;
      bool all = true;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replacing '");
          print_generic_expr (dump_file, lhs, dump_flags);
          fprintf (dump_file, "' with %s '",
                   (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
          print_generic_expr (dump_file, rhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      /* Walk over every use of LHS and try to replace the use with RHS.
         At this point the only reason why such a propagation would not
         be successful would be if the use occurs in an ASM_EXPR.  */
      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
        {
          /* Leave debug stmts alone.  If we succeed in propagating
             all non-debug uses, we'll drop the DEF, and propagation
             into debug stmts will occur then.  */
          if (gimple_debug_bind_p (use_stmt))
            continue;

          /* It's not always safe to propagate into an ASM_EXPR.  */
          if (gimple_code (use_stmt) == GIMPLE_ASM
              && ! may_propagate_copy_into_asm (lhs))
            {
              all = false;
              continue;
            }

          /* It's not ok to propagate into the definition stmt of RHS.
                <bb 9>:
                  # prephitmp.12_36 = PHI <g_67.1_6(9)>
                  g_67.1_6 = prephitmp.12_36;
                  goto <bb 9>;
             While this is strictly all dead code we do not want to
             deal with this here.  */
          if (TREE_CODE (rhs) == SSA_NAME
              && SSA_NAME_DEF_STMT (rhs) == use_stmt)
            {
              all = false;
              continue;
            }

          /* Dump details.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Original statement:");
              print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
            }

          /* Propagate the RHS into this use of the LHS.  */
          FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
            propagate_value (use_p, rhs);

          /* Special cases to avoid useless calls into the folding
             routines, operand scanning, etc.

             Propagation into a PHI may cause the PHI to become
             a degenerate, so mark the PHI as interesting.  No other
             actions are necessary.  */
          if (gimple_code (use_stmt) == GIMPLE_PHI)
            {
              tree result;

              /* Dump details.  */
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "    Updated statement:");
                  print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
                }

              result = get_lhs_or_phi_result (use_stmt);
              bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
              continue;
            }

          /* From this point onward we are propagating into a
             real statement.  Folding may (or may not) be possible,
             we may expose new operands, expose dead EH edges,
             etc.  */
          /* NOTE tuples.  In the tuples world, fold_stmt_inplace
             cannot fold a call that simplifies to a constant,
             because the GIMPLE_CALL must be replaced by a
             GIMPLE_ASSIGN, and there is no way to effect such a
             transformation in-place.  We might want to consider
             using the more general fold_stmt here.  */
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
              fold_stmt_inplace (&gsi);
            }

          /* Sometimes propagation can expose new operands to the
             renamer.  */
          update_stmt (use_stmt);

          /* Dump details.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Updated statement:");
              print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
            }

          /* If we replaced a variable index with a constant, then
             we would need to update the invariant flag for ADDR_EXPRs.  */
          if (gimple_assign_single_p (use_stmt)
              && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
            recompute_tree_invariant_for_addr_expr
              (gimple_assign_rhs1 (use_stmt));

          /* If we cleaned up EH information from the statement,
             mark its containing block as needing EH cleanups.  */
          if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
            {
              bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "    Flagged to clear EH edges.\n");
            }

          /* Propagation may expose new trivial copy/constant propagation
             opportunities.  */
          if (gimple_assign_single_p (use_stmt)
              && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
              && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
                  || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
            {
              tree result = get_lhs_or_phi_result (use_stmt);
              bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
            }
          /* Propagation into these nodes may make certain edges in
             the CFG unexecutable.  We want to identify them, since PHI
             nodes at the destination of those unexecutable edges may
             become degenerate.  */
          else if (gimple_code (use_stmt) == GIMPLE_COND
                   || gimple_code (use_stmt) == GIMPLE_SWITCH
                   || gimple_code (use_stmt) == GIMPLE_GOTO)
            {
              tree val;

              if (gimple_code (use_stmt) == GIMPLE_COND)
                val = fold_binary_loc (gimple_location (use_stmt),
                                       gimple_cond_code (use_stmt),
                                       boolean_type_node,
                                       gimple_cond_lhs (use_stmt),
                                       gimple_cond_rhs (use_stmt));
              else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
                val = gimple_switch_index (use_stmt);
              else
                val = gimple_goto_dest (use_stmt);

              if (val && is_gimple_min_invariant (val))
                {
                  basic_block bb = gimple_bb (use_stmt);
                  edge te = find_taken_edge (bb, val);
                  edge_iterator ei;
                  edge e;
                  gimple_stmt_iterator gsi, psi;

                  /* Remove all outgoing edges except TE.  */
                  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
                    {
                      if (e != te)
                        {
                          /* Mark all the PHI nodes at the destination of
                             the unexecutable edge as interesting.  */
                          for (psi = gsi_start_phis (e->dest);
                               !gsi_end_p (psi);
                               gsi_next (&psi))
                            {
                              gimple phi = gsi_stmt (psi);

                              tree result = gimple_phi_result (phi);
                              int version = SSA_NAME_VERSION (result);

                              bitmap_set_bit (interesting_names, version);
                            }

                          te->probability += e->probability;

                          te->count += e->count;
                          remove_edge (e);
                          cfg_altered = true;
                        }
                      else
                        ei_next (&ei);
                    }

                  gsi = gsi_last_bb (gimple_bb (use_stmt));
                  gsi_remove (&gsi, true);

                  /* And fixup the flags on the single remaining edge.  */
                  te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
                  te->flags &= ~EDGE_ABNORMAL;
                  te->flags |= EDGE_FALLTHRU;
                  if (te->probability > REG_BR_PROB_BASE)
                    te->probability = REG_BR_PROB_BASE;
                }
            }
        }

      /* Ensure there is nothing else to do.  */
      gcc_assert (!all || has_zero_uses (lhs));

      /* If we were able to propagate away all uses of LHS, then
         we can remove STMT.  */
      if (all)
        remove_stmt_or_phi (stmt);
    }
}
/* STMT is either a PHI node (potentially a degenerate PHI node) or
   a statement that is a trivial copy or constant initialization.

   Attempt to eliminate STMT by propagating its RHS into all uses of
   its LHS.  This may in turn set new bits in INTERESTING_NAMES
   for nodes we want to revisit later.

   All exit paths should clear INTERESTING_NAMES for the result
   of STMT.  */

static void
eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
{
  tree lhs = get_lhs_or_phi_result (stmt);
  tree rhs;
  int version = SSA_NAME_VERSION (lhs);

  /* If the LHS of this statement or PHI has no uses, then we can
     just eliminate it.  This can occur if, for example, the PHI
     was created by block duplication due to threading and its only
     use was in the conditional at the end of the block which was
     deleted.  */
  if (has_zero_uses (lhs))
    {
      bitmap_clear_bit (interesting_names, version);
      remove_stmt_or_phi (stmt);
      return;
    }

  /* Get the RHS of the assignment or PHI node if the PHI is a
     degenerate.  */
  rhs = get_rhs_or_phi_arg (stmt);
  if (!rhs)
    {
      bitmap_clear_bit (interesting_names, version);
      return;
    }

  propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);

  /* Note that STMT may well have been deleted by now, so do
     not access it, instead use the saved version # to clear
     STMT's entry in the worklist.  */
  bitmap_clear_bit (interesting_names, version);
}
/* The first phase in degenerate PHI elimination.

   Eliminate the degenerate PHIs in BB, then recurse on the
   dominator children of BB.  */

static void
eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
{
  gimple_stmt_iterator gsi;
  basic_block son;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      eliminate_const_or_copy (phi, interesting_names);
    }

  /* Recurse into the dominator children of BB.  */
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    eliminate_degenerate_phis_1 (son, interesting_names);
}
/* A very simple pass to eliminate degenerate PHI nodes from the
   IL.  This is meant to be fast enough to be able to be run several
   times in the optimization pipeline.

   Certain optimizations, particularly those which duplicate blocks
   or remove edges from the CFG, can create or expose PHIs which are
   trivial copies or constant initializations.

   While we could pick up these optimizations in DOM or with the
   combination of copy-prop and CCP, those solutions are far too
   heavy-weight for our needs.

   This implementation has two phases so that we can efficiently
   eliminate the first order degenerate PHIs and second order
   degenerate PHIs.

   The first phase performs a dominator walk to identify and eliminate
   the vast majority of the degenerate PHIs.  When a degenerate PHI
   is identified and eliminated, any affected statements or PHIs
   are put on a worklist.

   The second phase eliminates degenerate PHIs and trivial copies
   or constant initializations using the worklist.  This is how we
   pick up the secondary optimization opportunities with minimal
   cost.  */
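/* A sketch of a second order degenerate PHI: once

     x_1 = PHI <y_2(3), y_2(4)>

   is eliminated and its uses are replaced by y_2, a PHI such as

     z_5 = PHI <x_1(6), y_2(7)>

   becomes z_5 = PHI <y_2(6), y_2(7)>, which is itself degenerate; the
   worklist is what lets the second phase catch it.  */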
static unsigned int
eliminate_degenerate_phis (void)
{
  bitmap interesting_names;
  bitmap interesting_names1;

  /* Bitmap of blocks which need EH information updated.  We can not
     update it on-the-fly as doing so invalidates the dominator tree.  */
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* INTERESTING_NAMES is effectively our worklist, indexed by
     SSA_NAME_VERSION.

     A set bit indicates that the statement or PHI node which
     defines the SSA_NAME should be (re)examined to determine if
     it has become a degenerate PHI or trivial const/copy propagation
     opportunity.

     Experiments have shown we generally get better compilation
     time behavior with bitmaps rather than sbitmaps.  */
  interesting_names = BITMAP_ALLOC (NULL);
  interesting_names1 = BITMAP_ALLOC (NULL);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* First phase.  Eliminate degenerate PHIs via a dominator
     walk of the CFG.

     Experiments have indicated that we generally get better
     compile-time behavior by visiting blocks in the first
     phase in dominator order.  Presumably this is because walking
     in dominator order leaves fewer PHIs for later examination
     by the worklist phase.  */
  eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);

  /* Second phase.  Eliminate second order degenerate PHIs as well
     as trivial copies or constant initializations identified by
     the first phase or this phase.  Basically we keep iterating
     until our set of INTERESTING_NAMEs is empty.   */
  while (!bitmap_empty_p (interesting_names))
    {
      unsigned int i;
      bitmap_iterator bi;

      /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
         changed during the loop.  Copy it to another bitmap and
         use that.  */
      bitmap_copy (interesting_names1, interesting_names);

      EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
        {
          tree name = ssa_name (i);

          /* Ignore SSA_NAMEs that have been released because
             their defining statement was deleted (unreachable).  */
          if (name)
            eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
                                     interesting_names);
        }
    }

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Propagation of const and copies may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      BITMAP_FREE (need_eh_cleanup);
    }

  BITMAP_FREE (interesting_names);
  BITMAP_FREE (interesting_names1);
  return 0;
}
struct gimple_opt_pass pass_phi_only_cprop =
{
 {
  GIMPLE_PASS,
  "phicprop",                           /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_dominator,                       /* gate */
  eliminate_degenerate_phis,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_PHI_CPROP,                    /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg
    | TODO_ggc_collect
    | TODO_verify_ssa
    | TODO_verify_stmts
    | TODO_update_ssa                   /* todo_flags_finish */