/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"

/* This file implements optimizations on the dominator tree.  */

/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */

enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_TERNARY,
  EXPR_CALL
};

struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op;  tree opnd; } unary;
    struct { enum tree_code op;  tree opnd0, opnd1; } binary;
    struct { enum tree_code op;  tree opnd0, opnd1, opnd2; } ternary;
    struct { gimple fn_from; bool pure; size_t nargs; tree *args; } call;
  } ops;
};
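
/* For instance, an assignment such as x_1 = y_2 + z_3 is recorded with
   kind EXPR_BINARY, the type of the LHS, op PLUS_EXPR, and y_2 and z_3
   stored in ops.binary.opnd0 and ops.binary.opnd1 respectively.  */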

/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

typedef struct cond_equivalence_s
{
  struct hashable_expr cond;
  tree value;
} cond_equivalence;

DEF_VEC_O(cond_equivalence);
DEF_VEC_ALLOC_O(cond_equivalence,heap);

/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  */
  VEC(cond_equivalence, heap) *cond_equivalences;
};

/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;

/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;
DEF_VEC_P(expr_hash_elt_t);
DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);

static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;

/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};

/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;

/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_leave_block (struct dom_walk_data *, basic_block);
static void dom_opt_enter_block (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);

/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      expr->type = NULL_TREE;

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = stmt;

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = (tree *) xcalloc (nargs, sizeof (tree));
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}

/* Compare two hashable_expr structures for equivalence.
   They are considered equivalent when the expressions
   they denote must necessarily be equal.  The logic is intended
   to follow that of operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the two types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}

/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */

static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_TERNARY:
      val = iterative_hash_object (expr->ops.ternary.op, val);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        val = iterative_hash_exprs_commutative (expr->ops.ternary.opnd0,
                                                expr->ops.ternary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.ternary.opnd0, val);
          val = iterative_hash_expr (expr->ops.ternary.opnd1, val);
        }
      val = iterative_hash_expr (expr->ops.ternary.opnd2, val);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gimple fn_from;

        val = iterative_hash_object (code, val);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          val = iterative_hash_hashval_t
            ((hashval_t) gimple_call_internal_fn (fn_from), val);
        else
          val = iterative_hash_expr (gimple_call_fn (fn_from), val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}

/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", tree_code_name[element->expr.ops.ternary.op]);
      print_generic_expr (stream, element->expr.ops.ternary.opnd0, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd1, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd2, 0);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;
        gimple fn_from;

        fn_from = element->expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
                 stream);
        else
          print_generic_expr (stream, gimple_call_fn (fn_from), 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, "          ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}

/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);

  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);

  free (element);
}

/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}

/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              if (edge_info->cond_equivalences)
                VEC_free (cond_equivalence, heap, edge_info->cond_equivalences);
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}

/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free_expr_hash_elt);
  avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dom_opt_enter_block;
  walk_data.after_dom_children = dom_opt_leave_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (bb
              && single_succ_p (bb)
              && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            {
              bitmap_clear_bit (need_eh_cleanup, i);
              bitmap_set_bit (need_eh_cleanup, single_succ (bb)->index);
            }
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);

  /* Free the value-handle array.  */
  threadedge_finalize_values ();
  ssa_name_values = NULL;

  return 0;
}

static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}

struct gimple_opt_pass pass_dominator =
{
 {
  GIMPLE_PASS,
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg
    | TODO_update_ssa
    | TODO_verify_ssa
    | TODO_verify_flow                  /* todo_flags_finish */
 }
};

/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */
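
/* For example, "if (4 < b_3)" is rewritten as "if (b_3 > 4)", so that a
   constant operand consistently ends up as the second operand.  */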

static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}

/* Remove expressions made available in the current block from the
   AVAIL_EXPRS hash table, stopping at the marker (NULL entry) that was
   pushed onto AVAIL_EXPRS_STACK when the block was entered.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
    {
      expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);
      void **slot;

      if (victim == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, victim);
        }

      slot = htab_find_slot_with_hash (avail_exprs,
                                       victim, victim->hash, NO_INSERT);
      gcc_assert (slot && *slot == (void *) victim);
      htab_clear_slot (avail_exprs, slot);
    }
}

/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = VEC_pop (tree, const_and_copies_stack);
      set_ssa_name_value (dest, prev_value);
    }
}

/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}

/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

static void
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  if (! walk_data->global_data)
    {
      gimple dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);
      walk_data->global_data = dummy_cond;
    }

  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
}

/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */
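
/* For example, given x_4 = PHI <y_2, y_2, x_4>, every alternative other
   than the result itself is y_2, so we may record the equivalence
   x_4 = y_2 for use in the blocks dominated by this PHI's block.  */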

static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}

/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}

/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */
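
/* For example, when BB is reached only via the true arm of
   "if (a_1 == 5)", the edge carries the equivalence a_1 = 5, which is
   entered into our tables while BB and the blocks it dominates are
   being optimized.  */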

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          cond_equivalence *eq;

          if (lhs)
            record_equality (lhs, rhs);

          for (i = 0; VEC_iterate (cond_equivalence,
                                   edge_info->cond_equivalences, i, eq); ++i)
            record_cond (eq);
        }
    }
}

/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements:                   %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, "    avail_exprs: ");
  htab_statistics (file, avail_exprs);
}

/* Dump SSA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}

/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}

/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
    }
  else
    free (element);
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to **P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           VEC(cond_equivalence, heap) **p)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = boolean_true_node;
  VEC_safe_push (cond_equivalence, heap, *p, &c);
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    default:
      break;
    }

  /* Now record the original true and false conditions themselves.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  VEC_safe_push (cond_equivalence, heap, edge_info->cond_equivalences, &c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  VEC_safe_push (cond_equivalence, heap, edge_info->cond_equivalences, &c);
}

/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */
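
/* Note the unwind discipline: the previous value is pushed before the
   name itself, so restore_vars_to_original_value pops the name first,
   then the value to restore for that name.  */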

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  set_ssa_name_value (x, y);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}

/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

static int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}

/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}

/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */
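
/* The operands are canonicalized first, so that from "if (x_1 == 7)"
   we record x_1 = 7 rather than treating the constant as the
   destination of the equivalence.  */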

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}

/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

static bool
simple_iv_increment_p (gimple stmt)
{
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  if (gimple_assign_rhs_code (stmt) != PLUS_EXPR
      && gimple_assign_rhs_code (stmt) != MINUS_EXPR)
    return false;

  preinc = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}

/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */
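
/* For instance, if a_1 is known to equal 5 and a PHI node in a
   successor block has the argument a_1 on the edge from BB, that
   argument is replaced with 5.  */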

static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }
    }
}

/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */
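
/* Two cases are handled here: a GIMPLE_SWITCH on an SSA_NAME records
   INDEX = CASE_LOW on each outgoing edge that targets a unique
   single-value case label, and a GIMPLE_COND records equivalences and
   implied conditions on its true and false edges.  */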

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert_loc (loc, TREE_TYPE (index),
                                                 CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }
          else if (TREE_CODE (op0) == SSA_NAME
                   && (is_gimple_min_invariant (op1)
                       || TREE_CODE (op1) == SSA_NAME))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }

          /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
        }
    }
}

/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

static void
dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                     basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}

/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = (struct edge_info *) true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              cond_equivalence *eq;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              for (i = 0; VEC_iterate (cond_equivalence,
                                       edge_info->cond_equivalences, i, eq); ++i)
                record_cond (eq);
            }

          dom_thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
        }

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
          edge_info = (struct edge_info *) false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              cond_equivalence *eq;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              for (i = 0; VEC_iterate (cond_equivalence,
                                       edge_info->cond_equivalences, i, eq); ++i)
                record_cond (eq);
            }

          /* Now thread the edge.  */
          dom_thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();
}

/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.  */
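
/* For example, if a_5 = b_2 + c_3 dominates a later d_8 = b_2 + c_3,
   the latter statement is rewritten to d_8 = a_5 and counted as an
   eliminated redundancy.  */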

static void
eliminate_redundant_computations (gimple_stmt_iterator* gsi)
{
  tree expr_type;
  tree cached_lhs;
  bool insert = true;
  bool assigns_var_p = false;

  gimple stmt = gsi_stmt (*gsi);

  tree def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but cannot
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || gimple_vdef (stmt)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    expr_type = TREE_TYPE (gimple_switch_index (stmt));
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
    {
      gcc_checking_assert (TREE_CODE (cached_lhs) == SSA_NAME
                           || is_gimple_min_invariant (cached_lhs));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced redundant expr '");
          print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

      if (assigns_var_p
          && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
    }
}

/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.  */
/* We handle only very simple copy equivalences here.  The heavy
   lifting is done by eliminate_redundant_computations.  */
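
/* In particular, note that a store such as *p_1 = x_2 is also entered
   into the available expression table as the load x_2 = *p_1, so a
   dominated load from *p_1 can be replaced with x_2.  */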
1902 static void
1903 record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
1905 tree lhs;
1906 enum tree_code lhs_code;
1908 gcc_assert (is_gimple_assign (stmt));
1910 lhs = gimple_assign_lhs (stmt);
1911 lhs_code = TREE_CODE (lhs);
1913 if (lhs_code == SSA_NAME
1914 && gimple_assign_single_p (stmt))
1916 tree rhs = gimple_assign_rhs1 (stmt);
1918 /* If the RHS of the assignment is a constant or another variable that
1919 may be propagated, register it in the CONST_AND_COPIES table. We
1920 do not need to record unwind data for this, since this is a true
1921 assignment and not an equivalence inferred from a comparison. All
1922 uses of this ssa name are dominated by this assignment, so unwinding
1923 just costs time and space. */
1924 if (may_optimize_p
1925 && (TREE_CODE (rhs) == SSA_NAME
1926 || is_gimple_min_invariant (rhs)))
1928 if (dump_file && (dump_flags & TDF_DETAILS))
1930 fprintf (dump_file, "==== ASGN ");
1931 print_generic_expr (dump_file, lhs, 0);
1932 fprintf (dump_file, " = ");
1933 print_generic_expr (dump_file, rhs, 0);
1934 fprintf (dump_file, "\n");
1937 set_ssa_name_value (lhs, rhs);
1941 /* A memory store, even an aliased store, creates a useful
1942 equivalence. By exchanging the LHS and RHS, creating suitable
1943 vops and recording the result in the available expression table,
1944 we may be able to expose more redundant loads. */
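/* A hypothetical example of the exchange described above: for the store

     *p_6 = x_3;

   we enter the artificial statement x_3 = *p_6 into the table, so that
   a dominated load

     y_8 = *p_6;

   sharing the same virtual operand can be simplified to y_8 = x_3.  */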
1945 if (!gimple_has_volatile_ops (stmt)
1946 && gimple_references_memory_p (stmt)
1947 && gimple_assign_single_p (stmt)
1948 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1949 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
1950 && !is_gimple_reg (lhs))
1952 tree rhs = gimple_assign_rhs1 (stmt);
1953 gimple new_stmt;
1955 /* Build a new statement with the RHS and LHS exchanged. */
1956 if (TREE_CODE (rhs) == SSA_NAME)
1958 /* NOTE tuples. The call to gimple_build_assign below replaced
1959 a call to build_gimple_modify_stmt, which did not set the
1960 SSA_NAME_DEF_STMT on the LHS of the assignment. Doing so
1961 may cause an SSA validation failure, as the LHS may be a
1962 default-initialized name and should have no definition. I'm
1963 a bit dubious of this, as the artificial statement that we
1964 generate here may in fact be ill-formed, but it is simply
1965 used as an internal device in this pass, and never becomes
1966 part of the CFG. */
1967 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
1968 new_stmt = gimple_build_assign (rhs, lhs);
1969 SSA_NAME_DEF_STMT (rhs) = defstmt;
1971 else
1972 new_stmt = gimple_build_assign (rhs, lhs);
1974 gimple_set_vuse (new_stmt, gimple_vdef (stmt));
1976 /* Finally enter the statement into the available expression
1977 table. */
1978 lookup_avail_expr (new_stmt, true);
1982 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
1983 CONST_AND_COPIES. */
1985 static void
1986 cprop_operand (gimple stmt, use_operand_p op_p)
1988 tree val;
1989 tree op = USE_FROM_PTR (op_p);
1991 /* If the operand has a known constant value or it is known to be a
1992 copy of some other variable, use the value or copy stored in
1993 CONST_AND_COPIES. */
1994 val = SSA_NAME_VALUE (op);
1995 if (val && val != op)
1997 /* Do not change the base variable in the virtual operand
1998 tables. That would make it impossible to reconstruct
1999 the renamed virtual operand if we later modify this
2000 statement. Also only allow the new value to be an SSA_NAME
2001 for propagation into virtual operands. */
2002 if (!is_gimple_reg (op)
2003 && (TREE_CODE (val) != SSA_NAME
2004 || is_gimple_reg (val)
2005 || get_virtual_var (val) != get_virtual_var (op)))
2006 return;
2008 /* Do not replace hard register operands in asm statements. */
2009 if (gimple_code (stmt) == GIMPLE_ASM
2010 && !may_propagate_copy_into_asm (op))
2011 return;
2013 /* Certain operands are not allowed to be copy propagated due
2014 to their interaction with exception handling and some GCC
2015 extensions. */
2016 if (!may_propagate_copy (op, val))
2017 return;
2019 /* Do not propagate addresses that point to volatiles into memory
2020 stmts without volatile operands. */
2021 if (POINTER_TYPE_P (TREE_TYPE (val))
2022 && TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (val)))
2023 && gimple_has_mem_ops (stmt)
2024 && !gimple_has_volatile_ops (stmt))
2025 return;
2027 /* Do not propagate copies if the propagated value is at a deeper loop
2028 depth than the propagatee. Otherwise, this may move loop variant
2029 variables outside of their loops and prevent coalescing
2030 opportunities. If the value was loop invariant, it will be hoisted
2031 by LICM and exposed for copy propagation. */
2032 if (loop_depth_of_name (val) > loop_depth_of_name (op))
2033 return;
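/* Illustrative sketch of the depth check above: if VAL is defined at
   loop depth 2 and OP at depth 1, rewriting a depth-1 use of OP into
   VAL would extend VAL's live range outside its loop, which tends to
   hurt coalescing; so we simply give up on the propagation.  */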
2035 /* Do not propagate copies into simple IV increment statements.
2036 See PR23821 for how this can disturb IV analysis. */
2037 if (TREE_CODE (val) != INTEGER_CST
2038 && simple_iv_increment_p (stmt))
2039 return;
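/* A hypothetical instance of the IV case above: with the increment

     i_6 = i_4 + 1;

   and a recorded copy mapping i_4 to k_9, rewriting the statement to
   i_6 = k_9 + 1 would obscure the i_4 -> i_6 chain from the induction
   variable machinery (see PR23821).  Constants remain fine.  */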
2041 /* Dump details. */
2042 if (dump_file && (dump_flags & TDF_DETAILS))
2044 fprintf (dump_file, " Replaced '");
2045 print_generic_expr (dump_file, op, dump_flags);
2046 fprintf (dump_file, "' with %s '",
2047 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2048 print_generic_expr (dump_file, val, dump_flags);
2049 fprintf (dump_file, "'\n");
2052 if (TREE_CODE (val) != SSA_NAME)
2053 opt_stats.num_const_prop++;
2054 else
2055 opt_stats.num_copy_prop++;
2057 propagate_value (op_p, val);
2059 /* And note that we modified this statement. This is now
2060 safe, even if we changed virtual operands since we will
2061 rescan the statement and rewrite its operands again. */
2062 gimple_set_modified (stmt, true);
2066 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2067 known value for that SSA_NAME (or NULL if no value is known).
2069 Propagate values from CONST_AND_COPIES into the uses, vuses and
2070 vdef_ops of STMT. */
2072 static void
2073 cprop_into_stmt (gimple stmt)
2075 use_operand_p op_p;
2076 ssa_op_iter iter;
2078 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2080 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2081 cprop_operand (stmt, op_p);
2085 /* Optimize the statement pointed to by iterator SI.
2087 We try to perform some simplistic global redundancy elimination and
2088 constant propagation:
2090 1- To detect global redundancy, we keep track of expressions that have
2091 been computed in this block and its dominators. If we find that the
2092 same expression is computed more than once, we eliminate repeated
2093 computations by using the target of the first one.
2095 2- Constant values and copy assignments. This is used to do very
2096 simplistic constant and copy propagation. When a constant or copy
2097 assignment is found, we map the value on the RHS of the assignment to
2098 the variable in the LHS in the CONST_AND_COPIES table. */
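/* A short worked example with made-up SSA names showing both effects:

     a_1 = b_2 + c_3;
     x_4 = a_1;           <- records x_4 as a copy of a_1
     d_5 = b_2 + c_3;     <- redundant; rewritten to d_5 = a_1
     if (x_4 > 0)         <- copy propagation yields if (a_1 > 0)

   assuming no intervening stores or definitions invalidate the
   recorded entries.  */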
2100 static void
2101 optimize_stmt (basic_block bb, gimple_stmt_iterator si)
2103 gimple stmt, old_stmt;
2104 bool may_optimize_p;
2105 bool modified_p = false;
2107 old_stmt = stmt = gsi_stmt (si);
2109 if (gimple_code (stmt) == GIMPLE_COND)
2110 canonicalize_comparison (stmt);
2112 update_stmt_if_modified (stmt);
2113 opt_stats.num_stmts++;
2115 if (dump_file && (dump_flags & TDF_DETAILS))
2117 fprintf (dump_file, "Optimizing statement ");
2118 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2121 /* Const/copy propagate into USES, VUSES and the RHS of VDEFs. */
2122 cprop_into_stmt (stmt);
2124 /* If the statement has been modified with constant replacements,
2125 fold its RHS before checking for redundant computations. */
2126 if (gimple_modified_p (stmt))
2128 tree rhs = NULL;
2130 /* Try to fold the statement making sure that STMT is kept
2131 up to date. */
2132 if (fold_stmt (&si))
2134 stmt = gsi_stmt (si);
2135 gimple_set_modified (stmt, true);
2137 if (dump_file && (dump_flags & TDF_DETAILS))
2139 fprintf (dump_file, " Folded to: ");
2140 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2144 /* We only need to consider cases that can yield a gimple operand. */
2145 if (gimple_assign_single_p (stmt))
2146 rhs = gimple_assign_rhs1 (stmt);
2147 else if (gimple_code (stmt) == GIMPLE_GOTO)
2148 rhs = gimple_goto_dest (stmt);
2149 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2150 /* This should never be an ADDR_EXPR. */
2151 rhs = gimple_switch_index (stmt);
2153 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2154 recompute_tree_invariant_for_addr_expr (rhs);
2156 /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
2157 even if fold_stmt already updated the stmt and thus cleared
2158 the gimple_modified_p flag on it. */
2159 modified_p = true;
2162 /* Check for redundant computations. Do this optimization only
2163 for assignments, calls, conditionals and switches that have no volatile ops or side effects. */
2164 may_optimize_p = (!gimple_has_volatile_ops (stmt)
2165 && ((is_gimple_assign (stmt)
2166 && !gimple_rhs_has_side_effects (stmt))
2167 || (is_gimple_call (stmt)
2168 && gimple_call_lhs (stmt) != NULL_TREE
2169 && !gimple_rhs_has_side_effects (stmt))
2170 || gimple_code (stmt) == GIMPLE_COND
2171 || gimple_code (stmt) == GIMPLE_SWITCH));
2173 if (may_optimize_p)
2175 if (gimple_code (stmt) == GIMPLE_CALL)
2177 /* Resolve __builtin_constant_p. If it hasn't been
2178 folded to integer_one_node by now, it's fairly
2179 certain that the value simply isn't constant. */
2180 tree callee = gimple_call_fndecl (stmt);
2181 if (callee
2182 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2183 && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
2185 propagate_tree_value_into_stmt (&si, integer_zero_node);
2186 stmt = gsi_stmt (si);
2190 update_stmt_if_modified (stmt);
2191 eliminate_redundant_computations (&si);
2192 stmt = gsi_stmt (si);
2194 /* Perform simple redundant store elimination. */
2195 if (gimple_assign_single_p (stmt)
2196 && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
2198 tree lhs = gimple_assign_lhs (stmt);
2199 tree rhs = gimple_assign_rhs1 (stmt);
2200 tree cached_lhs;
2201 gimple new_stmt;
2202 if (TREE_CODE (rhs) == SSA_NAME)
2204 tree tem = SSA_NAME_VALUE (rhs);
2205 if (tem)
2206 rhs = tem;
2208 /* Build a new statement with the RHS and LHS exchanged. */
2209 if (TREE_CODE (rhs) == SSA_NAME)
2211 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
2212 new_stmt = gimple_build_assign (rhs, lhs);
2213 SSA_NAME_DEF_STMT (rhs) = defstmt;
2215 else
2216 new_stmt = gimple_build_assign (rhs, lhs);
2217 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2218 cached_lhs = lookup_avail_expr (new_stmt, false);
2219 if (cached_lhs
2220 && rhs == cached_lhs)
2222 basic_block bb = gimple_bb (stmt);
2223 int lp_nr = lookup_stmt_eh_lp (stmt);
2224 unlink_stmt_vdef (stmt);
2225 gsi_remove (&si, true);
2226 if (lp_nr != 0)
2228 bitmap_set_bit (need_eh_cleanup, bb->index);
2229 if (dump_file && (dump_flags & TDF_DETAILS))
2230 fprintf (dump_file, " Flagged to clear EH edges.\n");
2232 return;
2237 /* Record any additional equivalences created by this statement. */
2238 if (is_gimple_assign (stmt))
2239 record_equivalences_from_stmt (stmt, may_optimize_p);
2241 /* If STMT is a COND_EXPR and it was modified, then we may know
2242 where it goes. If that is the case, then mark the CFG as altered.
2244 This will cause us to later call remove_unreachable_blocks and
2245 cleanup_tree_cfg when it is safe to do so. It is not safe to
2246 clean things up here since removal of edges and such can trigger
2247 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2248 the manager.
2250 That's all fine and good, except that once SSA_NAMEs are released
2251 to the manager, we must not call create_ssa_name until all references
2252 to released SSA_NAMEs have been eliminated.
2254 All references to the deleted SSA_NAMEs cannot be eliminated until
2255 we remove unreachable blocks.
2257 We cannot remove unreachable blocks until after we have completed
2258 any queued jump threading.
2260 We cannot complete any queued jump threads until we have taken
2261 appropriate variables out of SSA form. Taking variables out of
2262 SSA form can call create_ssa_name and thus we lose.
2264 Ultimately I suspect we're going to need to change the interface
2265 into the SSA_NAME manager. */
2266 if (gimple_modified_p (stmt) || modified_p)
2268 tree val = NULL;
2270 update_stmt_if_modified (stmt);
2272 if (gimple_code (stmt) == GIMPLE_COND)
2273 val = fold_binary_loc (gimple_location (stmt),
2274 gimple_cond_code (stmt), boolean_type_node,
2275 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
2276 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2277 val = gimple_switch_index (stmt);
2279 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2280 cfg_altered = true;
2282 /* If we simplified a statement in such a way that it can be shown
2283 not to trap, update the EH information and the CFG to match. */
2284 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
2286 bitmap_set_bit (need_eh_cleanup, bb->index);
2287 if (dump_file && (dump_flags & TDF_DETAILS))
2288 fprintf (dump_file, " Flagged to clear EH edges.\n");
2293 /* Search for an existing instance of STMT in the AVAIL_EXPRS table.
2294 If found, return its LHS. Otherwise, if INSERT is true, insert
2295 STMT in the table and return NULL_TREE.
2297 Also, when an expression is first inserted in the table, it is
2298 also added to AVAIL_EXPRS_STACK, so that it can be removed when
2299 we finish processing this block and its children. */
2301 static tree
2302 lookup_avail_expr (gimple stmt, bool insert)
2304 void **slot;
2305 tree lhs;
2306 tree temp;
2307 struct expr_hash_elt element;
2309 /* Get LHS of assignment or call, else NULL_TREE. */
2310 lhs = gimple_get_lhs (stmt);
2312 initialize_hash_element (stmt, lhs, &element);
2314 if (dump_file && (dump_flags & TDF_DETAILS))
2316 fprintf (dump_file, "LKUP ");
2317 print_expr_hash_elt (dump_file, &element);
2320 /* Don't bother remembering constant assignments and copy operations.
2321 Constants and copy operations are handled by the constant/copy propagator
2322 in optimize_stmt. */
2323 if (element.expr.kind == EXPR_SINGLE
2324 && (TREE_CODE (element.expr.ops.single.rhs) == SSA_NAME
2325 || is_gimple_min_invariant (element.expr.ops.single.rhs)))
2326 return NULL_TREE;
2328 /* Finally try to find the expression in the main expression hash table. */
2329 slot = htab_find_slot_with_hash (avail_exprs, &element, element.hash,
2330 (insert ? INSERT : NO_INSERT));
2331 if (slot == NULL)
2332 return NULL_TREE;
2334 if (*slot == NULL)
2336 struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
2337 *element2 = element;
2338 element2->stamp = element2;
2339 *slot = (void *) element2;
2341 if (dump_file && (dump_flags & TDF_DETAILS))
2343 fprintf (dump_file, "2>>> ");
2344 print_expr_hash_elt (dump_file, element2);
2347 VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element2);
2348 return NULL_TREE;
2351 /* Extract the LHS of the assignment so that it can be used as the current
2352 definition of another variable. */
2353 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2355 /* See if the LHS appears in the CONST_AND_COPIES table. If it
2356 does, use that value instead. */
2357 if (TREE_CODE (lhs) == SSA_NAME)
2359 temp = SSA_NAME_VALUE (lhs);
2360 if (temp)
2361 lhs = temp;
2364 if (dump_file && (dump_flags & TDF_DETAILS))
2366 fprintf (dump_file, "FIND: ");
2367 print_generic_expr (dump_file, lhs, 0);
2368 fprintf (dump_file, "\n");
2371 return lhs;
2374 /* Hashing and equality functions for AVAIL_EXPRS. We compute a value number
2375 for expressions using the code of the expression and the SSA numbers of
2376 its operands. */
2378 static hashval_t
2379 avail_expr_hash (const void *p)
2381 gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
2382 const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
2383 tree vuse;
2384 hashval_t val = 0;
2386 val = iterative_hash_hashable_expr (expr, val);
2388 /* If the hash table entry is not associated with a statement, then we
2389 can just hash the expression and not worry about virtual operands
2390 and such. */
2391 if (!stmt)
2392 return val;
2394 /* Add the SSA version number of the vuse operand. This is important
2395 because compound variables like arrays are not renamed in the
2396 operands. Rather, the rename is done on the virtual variable
2397 representing all the elements of the array. */
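/* An illustrative consequence: two loads t_1 = a[i_5] and t_2 = a[i_5]
   separated by an aliasing store see different VUSEs, so mixing the
   VUSE into the hash keeps the second load from matching the first,
   now-stale table entry.  */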
2398 if ((vuse = gimple_vuse (stmt)))
2399 val = iterative_hash_expr (vuse, val);
2401 return val;
2404 static hashval_t
2405 real_avail_expr_hash (const void *p)
2407 return ((const struct expr_hash_elt *)p)->hash;
2410 static int
2411 avail_expr_eq (const void *p1, const void *p2)
2413 gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
2414 const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
2415 const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
2416 gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
2417 const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
2418 const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;
2420 /* This case should apply only when removing entries from the table. */
2421 if (stamp1 == stamp2)
2422 return true;
2424 /* FIXME tuples:
2425 We add stmts to a hash table and then modify them. To detect the case
2426 that we modify a stmt and then search for it, we assume that the hash
2427 is always modified by that change.
2428 We have to fully check why this doesn't happen on trunk or rewrite
2429 this in a more reliable (and easier to understand) way. */
2430 if (((const struct expr_hash_elt *)p1)->hash
2431 != ((const struct expr_hash_elt *)p2)->hash)
2432 return false;
2434 /* In case of a collision, both RHS have to be identical and have the
2435 same VUSE operands. */
2436 if (hashable_expr_equal_p (expr1, expr2)
2437 && types_compatible_p (expr1->type, expr2->type))
2439 /* Note that STMT1 and/or STMT2 may be NULL. */
2440 return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
2441 == (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
2444 return false;
2447 /* PHI-ONLY copy and constant propagation. This pass is meant to clean
2448 up degenerate PHIs created by or exposed by jump threading. */
2450 /* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
2451 NULL. */
2453 tree
2454 degenerate_phi_result (gimple phi)
2456 tree lhs = gimple_phi_result (phi);
2457 tree val = NULL;
2458 size_t i;
2460 /* Ignoring arguments which are the same as LHS, if all the remaining
2461 arguments are the same, then the PHI is a degenerate and has the
2462 value of that common argument. */
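/* For example (hypothetical IL), the PHI

     x_5 = PHI <y_2(3), y_2(4), x_5(6)>

   is a degenerate: ignoring the self-referencing argument, every
   argument is y_2, so the PHI has the value y_2.  */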
2463 for (i = 0; i < gimple_phi_num_args (phi); i++)
2465 tree arg = gimple_phi_arg_def (phi, i);
2467 if (arg == lhs)
2468 continue;
2469 else if (!arg)
2470 break;
2471 else if (!val)
2472 val = arg;
2473 else if (arg == val)
2474 continue;
2475 /* We open-code part of operand_equal_p here, not only to speed
2476 things up, but also to avoid crashing when dereferencing the
2477 type of a released SSA name. */
2478 else if (TREE_CODE (val) != TREE_CODE (arg)
2479 || TREE_CODE (val) == SSA_NAME
2480 || !operand_equal_p (arg, val, 0))
2481 break;
2483 return (i == gimple_phi_num_args (phi) ? val : NULL);
2486 /* Given a statement STMT, which is either a PHI node or an assignment,
2487 remove it from the IL. */
2489 static void
2490 remove_stmt_or_phi (gimple stmt)
2492 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2494 if (gimple_code (stmt) == GIMPLE_PHI)
2495 remove_phi_node (&gsi, true);
2496 else
2498 gsi_remove (&gsi, true);
2499 release_defs (stmt);
2503 /* Given a statement STMT, which is either a PHI node or an assignment,
2504 return the "rhs" of the node. In the case of a non-degenerate
2505 PHI, NULL is returned. */
2507 static tree
2508 get_rhs_or_phi_arg (gimple stmt)
2510 if (gimple_code (stmt) == GIMPLE_PHI)
2511 return degenerate_phi_result (stmt);
2512 else if (gimple_assign_single_p (stmt))
2513 return gimple_assign_rhs1 (stmt);
2514 else
2515 gcc_unreachable ();
2519 /* Given a statement STMT, which is either a PHI node or an assignment,
2520 return the "lhs" of the node. */
2522 static tree
2523 get_lhs_or_phi_result (gimple stmt)
2525 if (gimple_code (stmt) == GIMPLE_PHI)
2526 return gimple_phi_result (stmt);
2527 else if (is_gimple_assign (stmt))
2528 return gimple_assign_lhs (stmt);
2529 else
2530 gcc_unreachable ();
2533 /* Propagate RHS into all uses of LHS (when possible).
2535 RHS and LHS are derived from STMT, which is passed in solely so
2536 that we can remove it if propagation is successful.
2538 When propagating into a PHI node or into a statement which turns
2539 into a trivial copy or constant initialization, set the
2540 appropriate bit in INTERESTING_NAMEs so that we will visit those
2541 nodes as well in an effort to pick up secondary optimization
2542 opportunities. */
2544 static void
2545 propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
2547 /* First verify that propagation is valid and isn't going to move a
2548 loop variant variable outside its loop. */
2549 if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
2550 && (TREE_CODE (rhs) != SSA_NAME
2551 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2552 && may_propagate_copy (lhs, rhs)
2553 && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
2555 use_operand_p use_p;
2556 imm_use_iterator iter;
2557 gimple use_stmt;
2558 bool all = true;
2560 /* Dump details. */
2561 if (dump_file && (dump_flags & TDF_DETAILS))
2563 fprintf (dump_file, " Replacing '");
2564 print_generic_expr (dump_file, lhs, dump_flags);
2565 fprintf (dump_file, "' with %s '",
2566 (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
2567 print_generic_expr (dump_file, rhs, dump_flags);
2568 fprintf (dump_file, "'\n");
2571 /* Walk over every use of LHS and try to replace the use with RHS.
2572 At this point the only reason why such a propagation would not
2573 be successful would be if the use occurs in an ASM_EXPR. */
2574 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2576 /* Leave debug stmts alone. If we succeed in propagating
2577 all non-debug uses, we'll drop the DEF, and propagation
2578 into debug stmts will occur then. */
2579 if (gimple_debug_bind_p (use_stmt))
2580 continue;
2582 /* It's not always safe to propagate into an ASM_EXPR. */
2583 if (gimple_code (use_stmt) == GIMPLE_ASM
2584 && ! may_propagate_copy_into_asm (lhs))
2586 all = false;
2587 continue;
2590 /* It's not ok to propagate into the definition stmt of RHS.
2591 <bb 9>:
2592 # prephitmp.12_36 = PHI <g_67.1_6(9)>
2593 g_67.1_6 = prephitmp.12_36;
2594 goto <bb 9>;
2595 While this is strictly dead code, we do not want to
2596 deal with it here. */
2597 if (TREE_CODE (rhs) == SSA_NAME
2598 && SSA_NAME_DEF_STMT (rhs) == use_stmt)
2600 all = false;
2601 continue;
2604 /* Dump details. */
2605 if (dump_file && (dump_flags & TDF_DETAILS))
2607 fprintf (dump_file, " Original statement:");
2608 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2611 /* Propagate the RHS into this use of the LHS. */
2612 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2613 propagate_value (use_p, rhs);
2615 /* Special cases to avoid useless calls into the folding
2616 routines, operand scanning, etc.
2618 First, propagation into a PHI may cause the PHI to become
2619 a degenerate, so mark the PHI as interesting. No other
2620 actions are necessary.
2622 Second, if we're propagating a virtual operand and the
2623 propagation does not change the underlying _DECL node for
2624 the virtual operand, then no further actions are necessary. */
2625 if (gimple_code (use_stmt) == GIMPLE_PHI
2626 || (! is_gimple_reg (lhs)
2627 && TREE_CODE (rhs) == SSA_NAME
2628 && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
2630 /* Dump details. */
2631 if (dump_file && (dump_flags & TDF_DETAILS))
2633 fprintf (dump_file, " Updated statement:");
2634 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2637 /* Propagation into a PHI may expose new degenerate PHIs,
2638 so mark the result of the PHI as interesting. */
2639 if (gimple_code (use_stmt) == GIMPLE_PHI)
2641 tree result = get_lhs_or_phi_result (use_stmt);
2642 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2645 continue;
2648 /* From this point onward we are propagating into a
2649 real statement. Folding may (or may not) be possible,
2650 we may expose new operands, expose dead EH edges,
2651 etc. */
2652 /* NOTE tuples. In the tuples world, fold_stmt_inplace
2653 cannot fold a call that simplifies to a constant,
2654 because the GIMPLE_CALL must be replaced by a
2655 GIMPLE_ASSIGN, and there is no way to effect such a
2656 transformation in-place. We might want to consider
2657 using the more general fold_stmt here. */
2658 fold_stmt_inplace (use_stmt);
2660 /* Sometimes propagation can expose new operands to the
2661 renamer. */
2662 update_stmt (use_stmt);
2664 /* Dump details. */
2665 if (dump_file && (dump_flags & TDF_DETAILS))
2667 fprintf (dump_file, " Updated statement:");
2668 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2671 /* If we replaced a variable index with a constant, then
2672 we would need to update the invariant flag for ADDR_EXPRs. */
2673 if (gimple_assign_single_p (use_stmt)
2674 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
2675 recompute_tree_invariant_for_addr_expr
2676 (gimple_assign_rhs1 (use_stmt));
2678 /* If we cleaned up EH information from the statement,
2679 mark its containing block as needing EH cleanups. */
2680 if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
2682 bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
2683 if (dump_file && (dump_flags & TDF_DETAILS))
2684 fprintf (dump_file, " Flagged to clear EH edges.\n");
2687 /* Propagation may expose new trivial copy/constant propagation
2688 opportunities. */
2689 if (gimple_assign_single_p (use_stmt)
2690 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
2691 && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
2692 || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
2694 tree result = get_lhs_or_phi_result (use_stmt);
2695 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2698 /* Propagation into these nodes may make certain edges in
2699 the CFG unexecutable. We want to identify them, as PHI nodes
2700 at the destination of those unexecutable edges may become
2701 degenerate. */
2702 else if (gimple_code (use_stmt) == GIMPLE_COND
2703 || gimple_code (use_stmt) == GIMPLE_SWITCH
2704 || gimple_code (use_stmt) == GIMPLE_GOTO)
2706 tree val;
2708 if (gimple_code (use_stmt) == GIMPLE_COND)
2709 val = fold_binary_loc (gimple_location (use_stmt),
2710 gimple_cond_code (use_stmt),
2711 boolean_type_node,
2712 gimple_cond_lhs (use_stmt),
2713 gimple_cond_rhs (use_stmt));
2714 else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
2715 val = gimple_switch_index (use_stmt);
2716 else
2717 val = gimple_goto_dest (use_stmt);
2719 if (val && is_gimple_min_invariant (val))
2721 basic_block bb = gimple_bb (use_stmt);
2722 edge te = find_taken_edge (bb, val);
2723 edge_iterator ei;
2724 edge e;
2725 gimple_stmt_iterator gsi, psi;
2727 /* Remove all outgoing edges except TE. */
2728 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
2730 if (e != te)
2732 /* Mark all the PHI nodes at the destination of
2733 the unexecutable edge as interesting. */
2734 for (psi = gsi_start_phis (e->dest);
2735 !gsi_end_p (psi);
2736 gsi_next (&psi))
2738 gimple phi = gsi_stmt (psi);
2740 tree result = gimple_phi_result (phi);
2741 int version = SSA_NAME_VERSION (result);
2743 bitmap_set_bit (interesting_names, version);
2746 te->probability += e->probability;
2748 te->count += e->count;
2749 remove_edge (e);
2750 cfg_altered = true;
2752 else
2753 ei_next (&ei);
2756 gsi = gsi_last_bb (gimple_bb (use_stmt));
2757 gsi_remove (&gsi, true);
2759 /* And fixup the flags on the single remaining edge. */
2760 te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
2761 te->flags &= ~EDGE_ABNORMAL;
2762 te->flags |= EDGE_FALLTHRU;
2763 if (te->probability > REG_BR_PROB_BASE)
2764 te->probability = REG_BR_PROB_BASE;
2769 /* Ensure there is nothing else to do. */
2770 gcc_assert (!all || has_zero_uses (lhs));
2772 /* If we were able to propagate away all uses of LHS, then
2773 we can remove STMT. */
2774 if (all)
2775 remove_stmt_or_phi (stmt);
2779 /* STMT is either a PHI node (potentially a degenerate PHI node) or
2780 a statement that is a trivial copy or constant initialization.
2782 Attempt to eliminate STMT by propagating its RHS into all uses of
2783 its LHS. This may in turn set new bits in INTERESTING_NAMES
2784 for nodes we want to revisit later.
2786 All exit paths should clear INTERESTING_NAMES for the result
2787 of STMT. */
2789 static void
2790 eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
2792 tree lhs = get_lhs_or_phi_result (stmt);
2793 tree rhs;
2794 int version = SSA_NAME_VERSION (lhs);
2796 /* If the LHS of this statement or PHI has no uses, then we can
2797 just eliminate it. This can occur if, for example, the PHI
2798 was created by block duplication due to threading and its only
2799 use was in the conditional at the end of the block which was
2800 deleted. */
2801 if (has_zero_uses (lhs))
2803 bitmap_clear_bit (interesting_names, version);
2804 remove_stmt_or_phi (stmt);
2805 return;
2808 /* Get the RHS of the assignment or PHI node if the PHI is a
2809 degenerate. */
2810 rhs = get_rhs_or_phi_arg (stmt);
2811 if (!rhs)
2813 bitmap_clear_bit (interesting_names, version);
2814 return;
2817 propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
2819 /* Note that STMT may well have been deleted by now, so do
2820 not access it; instead use the saved version number to clear
2821 STMT's entry in the worklist. */
2822 bitmap_clear_bit (interesting_names, version);
2825 /* The first phase in degenerate PHI elimination.
2827 Eliminate the degenerate PHIs in BB, then recurse on the
2828 dominator children of BB. */
2830 static void
2831 eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
2833 gimple_stmt_iterator gsi;
2834 basic_block son;
2836 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2838 gimple phi = gsi_stmt (gsi);
2840 eliminate_const_or_copy (phi, interesting_names);
2843 /* Recurse into the dominator children of BB. */
2844 for (son = first_dom_son (CDI_DOMINATORS, bb);
2845 son;
2846 son = next_dom_son (CDI_DOMINATORS, son))
2847 eliminate_degenerate_phis_1 (son, interesting_names);
2851 /* A very simple pass to eliminate degenerate PHI nodes from the
2852 IL. This is meant to be fast enough to be run several
2853 times in the optimization pipeline.
2855 Certain optimizations, particularly those which duplicate blocks
2856 or remove edges from the CFG can create or expose PHIs which are
2857 trivial copies or constant initializations.
2859 While we could pick up these optimizations in DOM or with the
2860 combination of copy-prop and CCP, those solutions are far too
2861 heavy-weight for our needs.
2863 This implementation has two phases so that we can efficiently
2864 eliminate the first order degenerate PHIs and second order
2865 degenerate PHIs.
2867 The first phase performs a dominator walk to identify and eliminate
2868 the vast majority of the degenerate PHIs. When a degenerate PHI
2869 is identified and eliminated any affected statements or PHIs
2870 are put on a worklist.
2872 The second phase eliminates degenerate PHIs and trivial copies
2873 or constant initializations using the worklist. This is how we
2874 pick up the secondary optimization opportunities with minimal
2875 cost. */
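/* A sketch (hypothetical IL) of a second order opportunity: once the
   first phase eliminates

     x_3 = PHI <y_1(2), y_1(4)>

   and rewrites uses of x_3 to y_1, a PHI such as

     z_7 = PHI <x_3(5), y_1(6)>

   collapses to z_7 = PHI <y_1(5), y_1(6)>, which the worklist-driven
   second phase then removes.  */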
2877 static unsigned int
2878 eliminate_degenerate_phis (void)
2880 bitmap interesting_names;
2881 bitmap interesting_names1;
2883 /* Bitmap of blocks which need EH information updated. We cannot
2884 update it on-the-fly as doing so invalidates the dominator tree. */
2885 need_eh_cleanup = BITMAP_ALLOC (NULL);
2887 /* INTERESTING_NAMES is effectively our worklist, indexed by
2888 SSA_NAME_VERSION.
2890 A set bit indicates that the statement or PHI node which
2891 defines the SSA_NAME should be (re)examined to determine if
2892 it has become a degenerate PHI or trivial const/copy propagation
2893 opportunity.
2895 Experiments have shown we generally get better compile-time
2896 behavior with bitmaps rather than sbitmaps. */
2897 interesting_names = BITMAP_ALLOC (NULL);
2898 interesting_names1 = BITMAP_ALLOC (NULL);
2900 calculate_dominance_info (CDI_DOMINATORS);
2901 cfg_altered = false;
2903 /* First phase. Eliminate degenerate PHIs via a dominator
2904 walk of the CFG.
2906 Experiments have indicated that we generally get better
2907 compile-time behavior by visiting blocks in the first
2908 phase in dominator order. Presumably this is because walking
2909 in dominator order leaves fewer PHIs for later examination
2910 by the worklist phase. */
2911 eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
2913 /* Second phase. Eliminate second order degenerate PHIs as well
2914 as trivial copies or constant initializations identified by
2915 the first phase or this phase. Basically we keep iterating
2916 until our set of INTERESTING_NAMEs is empty. */
2917 while (!bitmap_empty_p (interesting_names))
2919 unsigned int i;
2920 bitmap_iterator bi;
2922 /* EXECUTE_IF_SET_IN_BITMAP does not allow its bitmap to be
2923 modified during the walk. Copy it to another bitmap and
2924 iterate over the copy. */
2925 bitmap_copy (interesting_names1, interesting_names);
2927 EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
2929 tree name = ssa_name (i);
2931 /* Ignore SSA_NAMEs that have been released because
2932 their defining statement was deleted (unreachable). */
2933 if (name)
2934 eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
2935 interesting_names);
2939 if (cfg_altered)
2940 free_dominance_info (CDI_DOMINATORS);
2942 /* Propagation of const and copies may make some EH edges dead. Purge
2943 such edges from the CFG as needed. */
2944 if (!bitmap_empty_p (need_eh_cleanup))
2946 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
2947 BITMAP_FREE (need_eh_cleanup);
2950 BITMAP_FREE (interesting_names);
2951 BITMAP_FREE (interesting_names1);
2952 return 0;
2955 struct gimple_opt_pass pass_phi_only_cprop =
2958 GIMPLE_PASS,
2959 "phicprop", /* name */
2960 gate_dominator, /* gate */
2961 eliminate_degenerate_phis, /* execute */
2962 NULL, /* sub */
2963 NULL, /* next */
2964 0, /* static_pass_number */
2965 TV_TREE_PHI_CPROP, /* tv_id */
2966 PROP_cfg | PROP_ssa, /* properties_required */
2967 0, /* properties_provided */
2968 0, /* properties_destroyed */
2969 0, /* todo_flags_start */
2970 TODO_cleanup_cfg
2971 | TODO_ggc_collect
2972 | TODO_verify_ssa
2973 | TODO_verify_stmts
2974 | TODO_update_ssa /* todo_flags_finish */