/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"
/* This file implements optimizations on the dominator tree.  */

/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */

enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_TERNARY,
  EXPR_CALL
};
struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0, opnd1; } binary;
    struct { enum tree_code op; tree opnd0, opnd1, opnd2; } ternary;
    struct { tree fn; bool pure; size_t nargs; tree *args; } call;
  } ops;
};
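/* Illustratively (an added example, not from the original source): the
   right-hand side of "x_3 = a_1 + b_2" is recorded as kind == EXPR_BINARY
   with ops.binary.op == PLUS_EXPR and operands a_1 and b_2; the LHS x_3
   is kept separately in the enclosing expr_hash_elt.  */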
/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

typedef struct cond_equivalence_s
{
  struct hashable_expr cond;
  tree value;
} cond_equivalence;

DEF_VEC_O(cond_equivalence);
DEF_VEC_ALLOC_O(cond_equivalence,heap);
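/* Illustratively (an added example): on the true edge out of
   "if (x_1 == 0)" the pass can record { cond = (x_1 == 0),
   value = boolean_true_node }, so a repeat of the same test in
   dominated code folds to true.  */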
/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  */
  VEC(cond_equivalence, heap) *cond_equivalences;
};
/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;

/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;
DEF_VEC_P(expr_hash_elt_t);
DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);

static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;
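/* Illustratively (an added example): after entering nested blocks B1 and
   B2 the stack may hold [NULL, e1, e2, NULL, e3] bottom-to-top (each
   NULL is a block marker).  Leaving B2 pops e3, removes it from
   AVAIL_EXPRS, and stops at B2's marker; e1 and e2 stay available for
   the rest of B1.  */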
/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The stmt pointer if this element corresponds to a statement.  */
  gimple stmt;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};
/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  */
static VEC(tree,heap) *const_and_copies_stack;
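/* Illustratively (an added example): recording "x_1 = 5" when x_1
   previously had no known value pushes the previous value (NULL) and
   then x_1; unwinding pops x_1, then pops NULL and restores it as
   x_1's SSA_NAME_VALUE.  */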
/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;
/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_leave_block (struct dom_walk_data *, basic_block);
static void dom_opt_enter_block (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      expr->type = NULL_TREE;

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (code == GIMPLE_CALL)
    {
      size_t nargs = gimple_call_num_args (stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn = gimple_call_fn (stmt);

      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = (tree *) xcalloc (nargs, sizeof (tree));
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
    }
  else if (code == GIMPLE_SWITCH)
    {
      expr->type = TREE_TYPE (gimple_switch_index (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->stmt = stmt;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->stmt = NULL;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (! operand_equal_p (expr0->ops.call.fn,
                               expr1->ops.call.fn, 0))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow iterative_hash_expr in tree.c.  */
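/* Illustratively (an added note): hashable_expr_equal_p treats
   "a_1 + b_2" and "b_2 + a_1" as equal, so for commutative operators
   the code below hashes the operand pair with
   iterative_hash_exprs_commutative to guarantee both orders produce
   the same hash value.  */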
static hashval_t
iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      val = iterative_hash_expr (expr->ops.single.rhs, val);
      break;

    case EXPR_UNARY:
      val = iterative_hash_object (expr->ops.unary.op, val);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        val += TYPE_UNSIGNED (expr->type);

      val = iterative_hash_expr (expr->ops.unary.opnd, val);
      break;

    case EXPR_BINARY:
      val = iterative_hash_object (expr->ops.binary.op, val);
      if (commutative_tree_code (expr->ops.binary.op))
        val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
                                                expr->ops.binary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
        }
      break;

    case EXPR_TERNARY:
      val = iterative_hash_object (expr->ops.ternary.op, val);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        val = iterative_hash_exprs_commutative (expr->ops.ternary.opnd0,
                                                expr->ops.ternary.opnd1, val);
      else
        {
          val = iterative_hash_expr (expr->ops.ternary.opnd0, val);
          val = iterative_hash_expr (expr->ops.ternary.opnd1, val);
        }
      val = iterative_hash_expr (expr->ops.ternary.opnd2, val);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;

        val = iterative_hash_object (code, val);
        val = iterative_hash_expr (expr->ops.call.fn, val);
        for (i = 0; i < expr->ops.call.nargs; i++)
          val = iterative_hash_expr (expr->ops.call.args[i], val);
      }
      break;

    default:
      gcc_unreachable ();
    }

  return val;
}
/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  if (element->stmt)
    fprintf (stream, "STMT ");
  else
    fprintf (stream, "COND ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", tree_code_name[element->expr.ops.ternary.op]);
      print_generic_expr (stream, element->expr.ops.ternary.opnd0, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd1, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd2, 0);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;

        print_generic_expr (stream, element->expr.ops.call.fn, 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;
    }
  fprintf (stream, "\n");

  if (element->stmt)
    {
      fprintf (stream, " ");
      print_gimple_stmt (stream, element->stmt, 0, 0);
    }
}
/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);

  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);

  free (element);
}

/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}
/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              if (edge_info->cond_equivalences)
                VEC_free (cond_equivalence, heap, edge_info->cond_equivalences);
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}
/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */
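/* In outline (an added summary of the function below): create the hash
   table and unwind stacks, walk the dominator tree recording
   equivalences and optimizing statements, rewrite any newly exposed
   symbols into SSA form, perform the queued jump threads, purge EH
   edges made dead by statement removal, and release the per-pass
   data structures.  */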
static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free_expr_hash_elt);
  avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dom_opt_enter_block;
  walk_data.after_dom_children = dom_opt_leave_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.  */
  loop_optimizer_init (LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK (i);
          if (bb
              && single_succ_p (bb)
              && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            {
              bitmap_clear_bit (need_eh_cleanup, i);
              bitmap_set_bit (need_eh_cleanup, single_succ (bb)->index);
            }
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  statistics_counter_event (cfun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (cfun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);

  /* Free the value-handle array.  */
  threadedge_finalize_values ();
  ssa_name_values = NULL;

  return 0;
}
static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}

struct gimple_opt_pass pass_dominator =
{
 {
  GIMPLE_PASS,
  "dom",                                /* name */
  gate_dominator,                       /* gate */
  tree_ssa_dominator_optimize,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,           /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg
    | TODO_update_ssa
    | TODO_verify_ssa
    | TODO_verify_flow
    | TODO_dump_func                    /* todo_flags_finish */
 }
};
/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */

static void
canonicalize_comparison (gimple condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Pop entries off AVAIL_EXPRS_STACK and remove the corresponding
   expressions from the global hash table AVAIL_EXPRS, stopping at the
   NULL marker that was pushed when the current block was entered.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
    {
      expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);
      void **slot;

      if (victim == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, victim);
        }

      slot = htab_find_slot_with_hash (avail_exprs,
                                       victim, victim->hash, NO_INSERT);
      gcc_assert (slot && *slot == (void *) victim);
      htab_clear_slot (avail_exprs, slot);
    }
}
/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
   CONST_AND_COPIES to its original state, stopping when we hit a
   NULL marker.  */

static void
restore_vars_to_original_value (void)
{
  while (VEC_length (tree, const_and_copies_stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, const_and_copies_stack);

      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = VEC_pop (tree, const_and_copies_stack);
      set_ssa_name_value (dest, prev_value);
    }
}

/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}
/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

static void
dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  if (! walk_data->global_data)
    {
      gimple dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);
      walk_data->global_data = dummy_cond;
    }

  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
}
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */
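/* Illustratively (an added example): for "x_3 = PHI <y_2, y_2, x_3>",
   every alternative is either the result itself or y_2, so x_3 must
   equal y_2 and its value can be recorded as y_2.  */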
static void
record_equivalences_from_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}
/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}
/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          cond_equivalence *eq;

          if (lhs)
            record_equality (lhs, rhs);

          for (i = 0; VEC_iterate (cond_equivalence,
                                   edge_info->cond_equivalences, i, eq); ++i)
            record_cond (eq);
        }
    }
}
/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements: %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, " avail_exprs: ");
  htab_statistics (file, avail_exprs);
}


/* Dump SSA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}


/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (htab),
           (long) htab_elements (htab),
           htab_collisions (htab));
}
/* Enter condition equivalence into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
  if (*slot == NULL)
    {
      *slot = (void *) element;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "1>>> ");
          print_expr_hash_elt (dump_file, element);
        }

      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
    }
  else
    free (element);
}
/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to **P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           VEC(cond_equivalence, heap) **p)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = boolean_true_node;
  VEC_safe_push (cond_equivalence, heap, *p, &c);
}
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */
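/* Concretely (an added summary of the switch below): for integer
   operands, "a < b" being true also records "a <= b" and "a != b" as
   true; for floating-point operands it additionally records
   "ORDERED (a, b)" and "a <> b" (LTGT_EXPR).  */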
static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    default:
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  VEC_safe_push (cond_equivalence, heap, edge_info->cond_equivalences, &c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  VEC_safe_push (cond_equivalence, heap, edge_info->cond_equivalences, &c);
}
/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  set_ssa_name_value (x, y);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}
/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

static int
loop_depth_of_name (tree x)
{
  gimple defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = gimple_bb (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}
/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local vector.  */

static void
record_const_or_copy (tree x, tree y)
{
  tree prev_x = SSA_NAME_VALUE (x);

  gcc_assert (TREE_CODE (x) == SSA_NAME);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
        y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x);
}
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
           || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
          || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

static bool
simple_iv_increment_p (gimple stmt)
{
  tree lhs, preinc;
  gimple phi;
  size_t i;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  if (gimple_assign_rhs_code (stmt) != PLUS_EXPR
      && gimple_assign_rhs_code (stmt) != MINUS_EXPR)
    return false;

  preinc = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (gimple_code (phi) != GIMPLE_PHI)
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) == lhs)
      return true;

  return false;
}
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */

static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gimple_stmt_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
        continue;

      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
          gimple phi = gsi_stmt (gsi);

          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
          orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
          orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;

          /* If we have *ORIG_P in our constant/copy table, then replace
             ORIG_P with its value in our constant/copy table.  */
          new_val = SSA_NAME_VALUE (orig_val);
          if (new_val
              && new_val != orig_val
              && (TREE_CODE (new_val) == SSA_NAME
                  || is_gimple_min_invariant (new_val))
              && may_propagate_copy (orig_val, new_val))
            propagate_value (orig_p, new_val);
        }
    }
}
/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */
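/* Illustratively (an added example): for "switch (i_1)", the edge to an
   unshared "case 5:" label implies i_1 == 5; for "if (x_1 == y_2)", the
   true edge implies the condition (and any conditions it dominates) is
   true and the false edge implies the inverse.  */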
static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          tree index = gimple_switch_index (stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert_loc (loc, TREE_TYPE (index),
                                                 CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }
          else if (TREE_CODE (op0) == SSA_NAME
                   && (is_gimple_min_invariant (op1)
                       || TREE_CODE (op1) == SSA_NAME))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }
        }

      /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
    }
}
static void
dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                     basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
{
  gimple last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge, i.e., we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
           && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
         more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
          VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

          edge_info = (struct edge_info *) true_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              cond_equivalence *eq;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              for (i = 0; VEC_iterate (cond_equivalence,
                                       edge_info->cond_equivalences, i, eq); ++i)
                record_cond (eq);
            }

          dom_thread_across_edge (walk_data, true_edge);

          /* And restore the various tables to their state before
             we threaded this edge.  */
          remove_local_expressions_from_table ();
        }

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
        {
          struct edge_info *edge_info;
          unsigned int i;

          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
          edge_info = (struct edge_info *) false_edge->aux;

          /* If we have info associated with this edge, record it into
             our equivalence tables.  */
          if (edge_info)
            {
              cond_equivalence *eq;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;

              /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);

              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              for (i = 0; VEC_iterate (cond_equivalence,
                                       edge_info->cond_equivalences, i, eq); ++i)
                record_cond (eq);
            }

          /* Now thread the edge.  */
          dom_thread_across_edge (walk_data, false_edge);

          /* No need to remove local expressions from our tables
             or restore vars to their original value as that will
             be done immediately below.  */
        }
    }

  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();
}
/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.  */

static void
eliminate_redundant_computations (gimple_stmt_iterator* gsi)
{
  tree expr_type;
  tree cached_lhs;
  bool insert = true;
  bool assigns_var_p = false;

  gimple stmt = gsi_stmt (*gsi);

  tree def = gimple_get_lhs (stmt);

  /* Certain expressions on the RHS can be optimized away, but can not
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || gimple_vdef (stmt)
      /* Do not record equivalences for increments of ivs.  This would create
         overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get the type of the expression we are trying to optimize.  */
  if (is_gimple_assign (stmt))
    {
      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    expr_type = boolean_type_node;
  else if (is_gimple_call (stmt))
    {
      gcc_assert (gimple_call_lhs (stmt));
      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
      assigns_var_p = true;
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    expr_type = TREE_TYPE (gimple_switch_index (stmt));
  else
    gcc_unreachable ();

  if (!cached_lhs)
    return;

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into the expression in STMT.  */
  if ((TREE_CODE (cached_lhs) != SSA_NAME
       && (assigns_var_p
           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
      || may_propagate_copy_into_stmt (stmt, cached_lhs))
    {
      gcc_checking_assert (TREE_CODE (cached_lhs) == SSA_NAME
                           || is_gimple_min_invariant (cached_lhs));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced redundant expr '");
          print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
          fprintf (dump_file, "'\n");
        }

      opt_stats.num_re++;

      if (assigns_var_p
          && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
        cached_lhs = fold_convert (expr_type, cached_lhs);

      propagate_tree_value_into_stmt (gsi, cached_lhs);

      /* Since it is always necessary to mark the result as modified,
         perhaps we should move this into propagate_tree_value_into_stmt
         itself.  */
      gimple_set_modified (gsi_stmt (*gsi), true);
    }
}
/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.  */
/* We handle only very simple copy equivalences here.  The heavy
   lifting is done by eliminate_redundant_computations.  */
1891 static void
1892 record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
1894 tree lhs;
1895 enum tree_code lhs_code;
1897 gcc_assert (is_gimple_assign (stmt));
1899 lhs = gimple_assign_lhs (stmt);
1900 lhs_code = TREE_CODE (lhs);
1902 if (lhs_code == SSA_NAME
1903 && gimple_assign_single_p (stmt))
1905 tree rhs = gimple_assign_rhs1 (stmt);
1907 /* If the RHS of the assignment is a constant or another variable that
1908 may be propagated, register it in the CONST_AND_COPIES table. We
1909 do not need to record unwind data for this, since this is a true
1910 assignment and not an equivalence inferred from a comparison. All
1911 uses of this ssa name are dominated by this assignment, so unwinding
1912 just costs time and space. */
1913 if (may_optimize_p
1914 && (TREE_CODE (rhs) == SSA_NAME
1915 || is_gimple_min_invariant (rhs)))
1917 if (dump_file && (dump_flags & TDF_DETAILS))
1919 fprintf (dump_file, "==== ASGN ");
1920 print_generic_expr (dump_file, lhs, 0);
1921 fprintf (dump_file, " = ");
1922 print_generic_expr (dump_file, rhs, 0);
1923 fprintf (dump_file, "\n");
1926 set_ssa_name_value (lhs, rhs);
1930 /* A memory store, even an aliased store, creates a useful
1931 equivalence. By exchanging the LHS and RHS, creating suitable
1932 vops and recording the result in the available expression table,
1933 we may be able to expose more redundant loads. */
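      /* E.g. (illustrative), after the store "*p_1 = x_2;" we record the
         artificial load "x_2 = *p_1;" so that a dominated load such as
         "y_3 = *p_1;" can be replaced by "y_3 = x_2;".  */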
1934 if (!gimple_has_volatile_ops (stmt)
1935 && gimple_references_memory_p (stmt)
1936 && gimple_assign_single_p (stmt)
1937 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1938 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
1939 && !is_gimple_reg (lhs))
1941 tree rhs = gimple_assign_rhs1 (stmt);
1942 gimple new_stmt;
1944 /* Build a new statement with the RHS and LHS exchanged. */
1945 if (TREE_CODE (rhs) == SSA_NAME)
1947 /* NOTE tuples. The call to gimple_build_assign below replaced
1948 a call to build_gimple_modify_stmt, which did not set the
1949 SSA_NAME_DEF_STMT on the LHS of the assignment. Doing so
1950 may cause an SSA validation failure, as the LHS may be a
1951 default-initialized name and should have no definition. I'm
1952 a bit dubious of this, as the artificial statement that we
1953 generate here may in fact be ill-formed, but it is simply
1954 used as an internal device in this pass, and never becomes
1955 part of the CFG. */
1956 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
1957 new_stmt = gimple_build_assign (rhs, lhs);
1958 SSA_NAME_DEF_STMT (rhs) = defstmt;
1960 else
1961 new_stmt = gimple_build_assign (rhs, lhs);
1963 gimple_set_vuse (new_stmt, gimple_vdef (stmt));
1965 /* Finally enter the statement into the available expression
1966 table. */
1967 lookup_avail_expr (new_stmt, true);
1971 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
1972 CONST_AND_COPIES. */
1974 static void
1975 cprop_operand (gimple stmt, use_operand_p op_p)
1977 tree val;
1978 tree op = USE_FROM_PTR (op_p);
1980 /* If the operand has a known constant value or it is known to be a
1981 copy of some other variable, use the value or copy stored in
1982 CONST_AND_COPIES. */
1983 val = SSA_NAME_VALUE (op);
1984 if (val && val != op)
1986 /* Do not change the base variable in the virtual operand
1987 tables. That would make it impossible to reconstruct
1988 the renamed virtual operand if we later modify this
1989 statement. Also only allow the new value to be an SSA_NAME
1990 for propagation into virtual operands. */
1991 if (!is_gimple_reg (op)
1992 && (TREE_CODE (val) != SSA_NAME
1993 || is_gimple_reg (val)
1994 || get_virtual_var (val) != get_virtual_var (op)))
1995 return;
1997 /* Do not replace hard register operands in asm statements. */
1998 if (gimple_code (stmt) == GIMPLE_ASM
1999 && !may_propagate_copy_into_asm (op))
2000 return;
2002 /* Certain operands are not allowed to be copy propagated due
2003 to their interaction with exception handling and some GCC
2004 extensions. */
2005 if (!may_propagate_copy (op, val))
2006 return;
2008 /* Do not propagate addresses that point to volatiles into memory
2009 stmts without volatile operands. */
2010 if (POINTER_TYPE_P (TREE_TYPE (val))
2011 && TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (val)))
2012 && gimple_has_mem_ops (stmt)
2013 && !gimple_has_volatile_ops (stmt))
2014 return;
2016 /* Do not propagate copies if the propagated value is at a deeper loop
2017 depth than the propagatee. Otherwise, this may move loop variant
2018 variables outside of their loops and prevent coalescing
2019 opportunities. If the value was loop invariant, it will be hoisted
2020 by LICM and exposed for copy propagation. */
2021 if (loop_depth_of_name (val) > loop_depth_of_name (op))
2022 return;
2024 /* Do not propagate copies into simple IV increment statements.
2025 See PR23821 for how this can disturb IV analysis. */
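      /* Illustrative sketch: in "i_3 = i_2 + 1" where i_2 is set by a
         PHI in the same loop, replacing i_2 with an equivalent copy
         would hide the increment from the induction variable
         machinery.  */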
2026 if (TREE_CODE (val) != INTEGER_CST
2027 && simple_iv_increment_p (stmt))
2028 return;
2030 /* Dump details. */
2031 if (dump_file && (dump_flags & TDF_DETAILS))
2033 fprintf (dump_file, " Replaced '");
2034 print_generic_expr (dump_file, op, dump_flags);
2035 fprintf (dump_file, "' with %s '",
2036 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2037 print_generic_expr (dump_file, val, dump_flags);
2038 fprintf (dump_file, "'\n");
2041 if (TREE_CODE (val) != SSA_NAME)
2042 opt_stats.num_const_prop++;
2043 else
2044 opt_stats.num_copy_prop++;
2046 propagate_value (op_p, val);
2048 /* And note that we modified this statement. This is now
2049 safe, even if we changed virtual operands since we will
2050 rescan the statement and rewrite its operands again. */
2051 gimple_set_modified (stmt, true);
2055 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2056 known value for that SSA_NAME (or NULL if no value is known).
2058 Propagate values from CONST_AND_COPIES into the uses, vuses and
2059 vdef_ops of STMT. */
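/* E.g. (illustrative), with CONST_AND_COPIES mapping b_2 to 5, the
   statement "a_1 = b_2 + c_3" is rewritten as "a_1 = 5 + c_3".  */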
2061 static void
2062 cprop_into_stmt (gimple stmt)
2064 use_operand_p op_p;
2065 ssa_op_iter iter;
2067 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2069 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2070 cprop_operand (stmt, op_p);
2074 /* Optimize the statement pointed to by iterator SI.
2076 We try to perform some simplistic global redundancy elimination and
2077 constant propagation:
2079 1- To detect global redundancy, we keep track of expressions that have
2080 been computed in this block and its dominators. If we find that the
2081 same expression is computed more than once, we eliminate repeated
2082 computations by using the target of the first one.
2084 2- Constant values and copy assignments. This is used to do very
2085 simplistic constant and copy propagation. When a constant or copy
2086 assignment is found, we map the value on the RHS of the assignment to
2087 the variable in the LHS in the CONST_AND_COPIES table. */
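/* A sketch of both transformations on illustrative GIMPLE:

     a_1 = b_2 + c_3;
     ...
     d_4 = b_2 + c_3;    <- redundant, rewritten as d_4 = a_1

     x_5 = 7;
     y_6 = x_5 + 1;      <- becomes y_6 = 7 + 1, later folded to 8  */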
2089 static void
2090 optimize_stmt (basic_block bb, gimple_stmt_iterator si)
2092 gimple stmt, old_stmt;
2093 bool may_optimize_p;
2094 bool modified_p = false;
2096 old_stmt = stmt = gsi_stmt (si);
2098 if (gimple_code (stmt) == GIMPLE_COND)
2099 canonicalize_comparison (stmt);
2101 update_stmt_if_modified (stmt);
2102 opt_stats.num_stmts++;
2104 if (dump_file && (dump_flags & TDF_DETAILS))
2106 fprintf (dump_file, "Optimizing statement ");
2107 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2110 /* Const/copy propagate into USES, VUSES and the RHS of VDEFs. */
2111 cprop_into_stmt (stmt);
2113 /* If the statement has been modified with constant replacements,
2114 fold its RHS before checking for redundant computations. */
2115 if (gimple_modified_p (stmt))
2117 tree rhs = NULL;
2119 /* Try to fold the statement making sure that STMT is kept
2120 up to date. */
2121 if (fold_stmt (&si))
2123 stmt = gsi_stmt (si);
2124 gimple_set_modified (stmt, true);
2126 if (dump_file && (dump_flags & TDF_DETAILS))
2128 fprintf (dump_file, " Folded to: ");
2129 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2133 /* We only need to consider cases that can yield a gimple operand. */
2134 if (gimple_assign_single_p (stmt))
2135 rhs = gimple_assign_rhs1 (stmt);
2136 else if (gimple_code (stmt) == GIMPLE_GOTO)
2137 rhs = gimple_goto_dest (stmt);
2138 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2139 /* This should never be an ADDR_EXPR. */
2140 rhs = gimple_switch_index (stmt);
2142 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2143 recompute_tree_invariant_for_addr_expr (rhs);
2145 /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
2146 even if fold_stmt updated the stmt already and thus cleared
2147    the gimple_modified_p flag on it.  */
2148 modified_p = true;
2151   /* Check for redundant computations.  Do this optimization only
2152      for assignments and calls that have no volatile ops, and for conditionals and switches.  */
2153 may_optimize_p = (!gimple_has_volatile_ops (stmt)
2154 && ((is_gimple_assign (stmt)
2155 && !gimple_rhs_has_side_effects (stmt))
2156 || (is_gimple_call (stmt)
2157 && gimple_call_lhs (stmt) != NULL_TREE
2158 && !gimple_rhs_has_side_effects (stmt))
2159 || gimple_code (stmt) == GIMPLE_COND
2160 || gimple_code (stmt) == GIMPLE_SWITCH));
2162 if (may_optimize_p)
2164 if (gimple_code (stmt) == GIMPLE_CALL)
2166 /* Resolve __builtin_constant_p. If it hasn't been
2167 folded to integer_one_node by now, it's fairly
2168 certain that the value simply isn't constant. */
2169 tree callee = gimple_call_fndecl (stmt);
2170 if (callee
2171 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2172 && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
2174 propagate_tree_value_into_stmt (&si, integer_zero_node);
2175 stmt = gsi_stmt (si);
2179 update_stmt_if_modified (stmt);
2180 eliminate_redundant_computations (&si);
2181 stmt = gsi_stmt (si);
2183 /* Perform simple redundant store elimination. */
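      /* E.g. (illustrative), in "x_1 = *p_2; ... *p_2 = x_1;" the
         second statement stores the value the location is already
         known to hold, so the store itself can be removed.  */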
2184 if (gimple_assign_single_p (stmt)
2185 && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
2187 tree lhs = gimple_assign_lhs (stmt);
2188 tree rhs = gimple_assign_rhs1 (stmt);
2189 tree cached_lhs;
2190 gimple new_stmt;
2191 if (TREE_CODE (rhs) == SSA_NAME)
2193 tree tem = SSA_NAME_VALUE (rhs);
2194 if (tem)
2195 rhs = tem;
2197 /* Build a new statement with the RHS and LHS exchanged. */
2198 if (TREE_CODE (rhs) == SSA_NAME)
2200 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
2201 new_stmt = gimple_build_assign (rhs, lhs);
2202 SSA_NAME_DEF_STMT (rhs) = defstmt;
2204 else
2205 new_stmt = gimple_build_assign (rhs, lhs);
2206 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2207 cached_lhs = lookup_avail_expr (new_stmt, false);
2208 if (cached_lhs
2209 && rhs == cached_lhs)
2211 basic_block bb = gimple_bb (stmt);
2212 int lp_nr = lookup_stmt_eh_lp (stmt);
2213 unlink_stmt_vdef (stmt);
2214 gsi_remove (&si, true);
2215 if (lp_nr != 0)
2217 bitmap_set_bit (need_eh_cleanup, bb->index);
2218 if (dump_file && (dump_flags & TDF_DETAILS))
2219 fprintf (dump_file, " Flagged to clear EH edges.\n");
2221 return;
2226 /* Record any additional equivalences created by this statement. */
2227 if (is_gimple_assign (stmt))
2228 record_equivalences_from_stmt (stmt, may_optimize_p);
2230 /* If STMT is a COND_EXPR and it was modified, then we may know
2231 where it goes. If that is the case, then mark the CFG as altered.
2233 This will cause us to later call remove_unreachable_blocks and
2234 cleanup_tree_cfg when it is safe to do so. It is not safe to
2235 clean things up here since removal of edges and such can trigger
2236 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2237 the manager.
2239 That's all fine and good, except that once SSA_NAMEs are released
2240 to the manager, we must not call create_ssa_name until all references
2241 to released SSA_NAMEs have been eliminated.
2243 All references to the deleted SSA_NAMEs can not be eliminated until
2244 we remove unreachable blocks.
2246 We can not remove unreachable blocks until after we have completed
2247 any queued jump threading.
2249 We can not complete any queued jump threads until we have taken
2250 appropriate variables out of SSA form. Taking variables out of
2251 SSA form can call create_ssa_name and thus we lose.
2253 Ultimately I suspect we're going to need to change the interface
2254 into the SSA_NAME manager. */
2255 if (gimple_modified_p (stmt) || modified_p)
2257 tree val = NULL;
2259 update_stmt_if_modified (stmt);
2261 if (gimple_code (stmt) == GIMPLE_COND)
2262 val = fold_binary_loc (gimple_location (stmt),
2263 gimple_cond_code (stmt), boolean_type_node,
2264 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
2265 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2266 val = gimple_switch_index (stmt);
2268 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2269 cfg_altered = true;
2271       /* If we simplified a statement in such a way that it can be shown
2272          not to trap, update the EH information and the CFG to match.  */
2273 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
2275 bitmap_set_bit (need_eh_cleanup, bb->index);
2276 if (dump_file && (dump_flags & TDF_DETAILS))
2277 fprintf (dump_file, " Flagged to clear EH edges.\n");
2282 /* Search for an existing instance of STMT in the AVAIL_EXPRS table.
2283 If found, return its LHS. Otherwise insert STMT in the table and
2284 return NULL_TREE.
2286    Also, when an expression is first inserted in the table, it is
2287    also added to AVAIL_EXPRS_STACK, so that it can be removed when
2288 we finish processing this block and its children. */
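/* E.g. (illustrative), looking up "t_9 = b_2 + c_3" with INSERT set
   either returns the LHS of an earlier computation of b_2 + c_3, or
   records t_9 as the available value of that expression for the
   duration of this block's dominator subtree.  */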
2290 static tree
2291 lookup_avail_expr (gimple stmt, bool insert)
2293 void **slot;
2294 tree lhs;
2295 tree temp;
2296 struct expr_hash_elt element;
2298 /* Get LHS of assignment or call, else NULL_TREE. */
2299 lhs = gimple_get_lhs (stmt);
2301 initialize_hash_element (stmt, lhs, &element);
2303 if (dump_file && (dump_flags & TDF_DETAILS))
2305 fprintf (dump_file, "LKUP ");
2306 print_expr_hash_elt (dump_file, &element);
2309 /* Don't bother remembering constant assignments and copy operations.
2310 Constants and copy operations are handled by the constant/copy propagator
2311 in optimize_stmt. */
2312 if (element.expr.kind == EXPR_SINGLE
2313 && (TREE_CODE (element.expr.ops.single.rhs) == SSA_NAME
2314 || is_gimple_min_invariant (element.expr.ops.single.rhs)))
2315 return NULL_TREE;
2317 /* Finally try to find the expression in the main expression hash table. */
2318 slot = htab_find_slot_with_hash (avail_exprs, &element, element.hash,
2319 (insert ? INSERT : NO_INSERT));
2320 if (slot == NULL)
2321 return NULL_TREE;
2323 if (*slot == NULL)
2325 struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
2326 *element2 = element;
2327 element2->stamp = element2;
2328 *slot = (void *) element2;
2330 if (dump_file && (dump_flags & TDF_DETAILS))
2332 fprintf (dump_file, "2>>> ");
2333 print_expr_hash_elt (dump_file, element2);
2336 VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element2);
2337 return NULL_TREE;
2340 /* Extract the LHS of the assignment so that it can be used as the current
2341 definition of another variable. */
2342 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2344 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2345 use the value from the const_and_copies table. */
2346 if (TREE_CODE (lhs) == SSA_NAME)
2348 temp = SSA_NAME_VALUE (lhs);
2349 if (temp)
2350 lhs = temp;
2353 if (dump_file && (dump_flags & TDF_DETAILS))
2355 fprintf (dump_file, "FIND: ");
2356 print_generic_expr (dump_file, lhs, 0);
2357 fprintf (dump_file, "\n");
2360 return lhs;
2363 /* Hashing and equality functions for AVAIL_EXPRS. We compute a value number
2364 for expressions using the code of the expression and the SSA numbers of
2365 its operands. */
2367 static hashval_t
2368 avail_expr_hash (const void *p)
2370 gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
2371 const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
2372 tree vuse;
2373 hashval_t val = 0;
2375 val = iterative_hash_hashable_expr (expr, val);
2377 /* If the hash table entry is not associated with a statement, then we
2378 can just hash the expression and not worry about virtual operands
2379 and such. */
2380 if (!stmt)
2381 return val;
2383   /* Add the SSA version number of the vuse operand.  This is important
2384 because compound variables like arrays are not renamed in the
2385 operands. Rather, the rename is done on the virtual variable
2386 representing all the elements of the array. */
2387 if ((vuse = gimple_vuse (stmt)))
2388 val = iterative_hash_expr (vuse, val);
2390 return val;
2393 static hashval_t
2394 real_avail_expr_hash (const void *p)
2396 return ((const struct expr_hash_elt *)p)->hash;
2399 static int
2400 avail_expr_eq (const void *p1, const void *p2)
2402 gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
2403 const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
2404 const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
2405 gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
2406 const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
2407 const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;
2409 /* This case should apply only when removing entries from the table. */
2410 if (stamp1 == stamp2)
2411 return true;
2413 /* FIXME tuples:
2414      We add stmts to a hash table and then modify them. To detect the case
2415 that we modify a stmt and then search for it, we assume that the hash
2416 is always modified by that change.
2417 We have to fully check why this doesn't happen on trunk or rewrite
2418 this in a more reliable (and easier to understand) way. */
2419 if (((const struct expr_hash_elt *)p1)->hash
2420 != ((const struct expr_hash_elt *)p2)->hash)
2421 return false;
2423 /* In case of a collision, both RHS have to be identical and have the
2424 same VUSE operands. */
2425 if (hashable_expr_equal_p (expr1, expr2)
2426 && types_compatible_p (expr1->type, expr2->type))
2428 /* Note that STMT1 and/or STMT2 may be NULL. */
2429 return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
2430 == (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
2433 return false;
2436 /* PHI-ONLY copy and constant propagation. This pass is meant to clean
2437 up degenerate PHIs created by or exposed by jump threading. */
2439 /* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
2440 NULL. */
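/* E.g. (illustrative), x_1 = PHI <y_2(3), y_2(5), x_1(7)> is a
   degenerate: ignoring the self-referencing argument, all remaining
   arguments are y_2, so the PHI has the value y_2.  */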
2442 tree
2443 degenerate_phi_result (gimple phi)
2445 tree lhs = gimple_phi_result (phi);
2446 tree val = NULL;
2447 size_t i;
2449 /* Ignoring arguments which are the same as LHS, if all the remaining
2450 arguments are the same, then the PHI is a degenerate and has the
2451 value of that common argument. */
2452 for (i = 0; i < gimple_phi_num_args (phi); i++)
2454 tree arg = gimple_phi_arg_def (phi, i);
2456 if (arg == lhs)
2457 continue;
2458 else if (!arg)
2459 break;
2460 else if (!val)
2461 val = arg;
2462 else if (arg == val)
2463 continue;
2464       /* We duplicate some of operand_equal_p's checks here, not only to
2465          speed things up, but also to avoid crashing when dereferencing the
2466          type of a released SSA name.  */
2467 else if (TREE_CODE (val) != TREE_CODE (arg)
2468 || TREE_CODE (val) == SSA_NAME
2469 || !operand_equal_p (arg, val, 0))
2470 break;
2472 return (i == gimple_phi_num_args (phi) ? val : NULL);
2475 /* Given a statement STMT, which is either a PHI node or an assignment,
2476 remove it from the IL. */
2478 static void
2479 remove_stmt_or_phi (gimple stmt)
2481 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2483 if (gimple_code (stmt) == GIMPLE_PHI)
2484 remove_phi_node (&gsi, true);
2485 else
2487 gsi_remove (&gsi, true);
2488 release_defs (stmt);
2492 /* Given a statement STMT, which is either a PHI node or an assignment,
2493    return the "rhs" of the node.  In the case of a non-degenerate
2494    PHI, NULL is returned.  */
2496 static tree
2497 get_rhs_or_phi_arg (gimple stmt)
2499 if (gimple_code (stmt) == GIMPLE_PHI)
2500 return degenerate_phi_result (stmt);
2501 else if (gimple_assign_single_p (stmt))
2502 return gimple_assign_rhs1 (stmt);
2503 else
2504 gcc_unreachable ();
2508 /* Given a statement STMT, which is either a PHI node or an assignment,
2509 return the "lhs" of the node. */
2511 static tree
2512 get_lhs_or_phi_result (gimple stmt)
2514 if (gimple_code (stmt) == GIMPLE_PHI)
2515 return gimple_phi_result (stmt);
2516 else if (is_gimple_assign (stmt))
2517 return gimple_assign_lhs (stmt);
2518 else
2519 gcc_unreachable ();
2522 /* Propagate RHS into all uses of LHS (when possible).
2524 RHS and LHS are derived from STMT, which is passed in solely so
2525 that we can remove it if propagation is successful.
2527 When propagating into a PHI node or into a statement which turns
2528 into a trivial copy or constant initialization, set the
2529 appropriate bit in INTERESTING_NAMEs so that we will visit those
2530 nodes as well in an effort to pick up secondary optimization
2531 opportunities. */
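/* E.g. (illustrative), given STMT "x_1 = 0", each use of x_1 is
   replaced by 0; if every non-debug use can be replaced, STMT itself
   is removed.  */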
2533 static void
2534 propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
2536 /* First verify that propagation is valid and isn't going to move a
2537 loop variant variable outside its loop. */
2538 if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
2539 && (TREE_CODE (rhs) != SSA_NAME
2540 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
2541 && may_propagate_copy (lhs, rhs)
2542 && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
2544 use_operand_p use_p;
2545 imm_use_iterator iter;
2546 gimple use_stmt;
2547 bool all = true;
2549 /* Dump details. */
2550 if (dump_file && (dump_flags & TDF_DETAILS))
2552 fprintf (dump_file, " Replacing '");
2553 print_generic_expr (dump_file, lhs, dump_flags);
2554 fprintf (dump_file, "' with %s '",
2555 (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
2556 print_generic_expr (dump_file, rhs, dump_flags);
2557 fprintf (dump_file, "'\n");
2560 /* Walk over every use of LHS and try to replace the use with RHS.
2561 At this point the only reason why such a propagation would not
2562 be successful would be if the use occurs in an ASM_EXPR. */
2563 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2565 /* Leave debug stmts alone. If we succeed in propagating
2566 all non-debug uses, we'll drop the DEF, and propagation
2567 into debug stmts will occur then. */
2568 if (gimple_debug_bind_p (use_stmt))
2569 continue;
2571 /* It's not always safe to propagate into an ASM_EXPR. */
2572 if (gimple_code (use_stmt) == GIMPLE_ASM
2573 && ! may_propagate_copy_into_asm (lhs))
2575 all = false;
2576 continue;
2579 /* It's not ok to propagate into the definition stmt of RHS.
2580 <bb 9>:
2581 # prephitmp.12_36 = PHI <g_67.1_6(9)>
2582 g_67.1_6 = prephitmp.12_36;
2583 goto <bb 9>;
2584          While this is strictly all dead code, we do not want to
2585          deal with it here.  */
2586 if (TREE_CODE (rhs) == SSA_NAME
2587 && SSA_NAME_DEF_STMT (rhs) == use_stmt)
2589 all = false;
2590 continue;
2593 /* Dump details. */
2594 if (dump_file && (dump_flags & TDF_DETAILS))
2596 fprintf (dump_file, " Original statement:");
2597 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2600 /* Propagate the RHS into this use of the LHS. */
2601 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2602 propagate_value (use_p, rhs);
2604 /* Special cases to avoid useless calls into the folding
2605 routines, operand scanning, etc.
2607 First, propagation into a PHI may cause the PHI to become
2608 a degenerate, so mark the PHI as interesting. No other
2609 actions are necessary.
2611 Second, if we're propagating a virtual operand and the
2612 propagation does not change the underlying _DECL node for
2613 the virtual operand, then no further actions are necessary. */
2614 if (gimple_code (use_stmt) == GIMPLE_PHI
2615 || (! is_gimple_reg (lhs)
2616 && TREE_CODE (rhs) == SSA_NAME
2617 && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
2619 /* Dump details. */
2620 if (dump_file && (dump_flags & TDF_DETAILS))
2622 fprintf (dump_file, " Updated statement:");
2623 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2626 /* Propagation into a PHI may expose new degenerate PHIs,
2627 so mark the result of the PHI as interesting. */
2628 if (gimple_code (use_stmt) == GIMPLE_PHI)
2630 tree result = get_lhs_or_phi_result (use_stmt);
2631 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2634 continue;
2637 /* From this point onward we are propagating into a
2638 real statement. Folding may (or may not) be possible,
2639 we may expose new operands, expose dead EH edges,
2640 etc. */
2641 /* NOTE tuples. In the tuples world, fold_stmt_inplace
2642 cannot fold a call that simplifies to a constant,
2643 because the GIMPLE_CALL must be replaced by a
2644 GIMPLE_ASSIGN, and there is no way to effect such a
2645 transformation in-place. We might want to consider
2646 using the more general fold_stmt here. */
2647 fold_stmt_inplace (use_stmt);
2649 /* Sometimes propagation can expose new operands to the
2650 renamer. */
2651 update_stmt (use_stmt);
2653 /* Dump details. */
2654 if (dump_file && (dump_flags & TDF_DETAILS))
2656 fprintf (dump_file, " Updated statement:");
2657 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2660 /* If we replaced a variable index with a constant, then
2661 we would need to update the invariant flag for ADDR_EXPRs. */
2662 if (gimple_assign_single_p (use_stmt)
2663 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
2664 recompute_tree_invariant_for_addr_expr
2665 (gimple_assign_rhs1 (use_stmt));
2667 /* If we cleaned up EH information from the statement,
2668 mark its containing block as needing EH cleanups. */
2669 if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
2671 bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
2672 if (dump_file && (dump_flags & TDF_DETAILS))
2673 fprintf (dump_file, " Flagged to clear EH edges.\n");
2676 /* Propagation may expose new trivial copy/constant propagation
2677 opportunities. */
2678 if (gimple_assign_single_p (use_stmt)
2679 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
2680 && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
2681 || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
2683 tree result = get_lhs_or_phi_result (use_stmt);
2684 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2687 /* Propagation into these nodes may make certain edges in
2688          the CFG unexecutable.  We want to identify them, as PHI nodes
2689          at the destinations of those unexecutable edges may become
2690          degenerate.  */
2691 else if (gimple_code (use_stmt) == GIMPLE_COND
2692 || gimple_code (use_stmt) == GIMPLE_SWITCH
2693 || gimple_code (use_stmt) == GIMPLE_GOTO)
2695 tree val;
2697 if (gimple_code (use_stmt) == GIMPLE_COND)
2698 val = fold_binary_loc (gimple_location (use_stmt),
2699 gimple_cond_code (use_stmt),
2700 boolean_type_node,
2701 gimple_cond_lhs (use_stmt),
2702 gimple_cond_rhs (use_stmt));
2703 else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
2704 val = gimple_switch_index (use_stmt);
2705 else
2706 val = gimple_goto_dest (use_stmt);
2708 if (val && is_gimple_min_invariant (val))
2710 basic_block bb = gimple_bb (use_stmt);
2711 edge te = find_taken_edge (bb, val);
2712 edge_iterator ei;
2713 edge e;
2714 gimple_stmt_iterator gsi, psi;
2716 /* Remove all outgoing edges except TE. */
2717 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
2719 if (e != te)
2721 /* Mark all the PHI nodes at the destination of
2722 the unexecutable edge as interesting. */
2723 for (psi = gsi_start_phis (e->dest);
2724 !gsi_end_p (psi);
2725 gsi_next (&psi))
2727 gimple phi = gsi_stmt (psi);
2729 tree result = gimple_phi_result (phi);
2730 int version = SSA_NAME_VERSION (result);
2732 bitmap_set_bit (interesting_names, version);
2735 te->probability += e->probability;
2737 te->count += e->count;
2738 remove_edge (e);
2739 cfg_altered = true;
2741 else
2742 ei_next (&ei);
2745 gsi = gsi_last_bb (gimple_bb (use_stmt));
2746 gsi_remove (&gsi, true);
2748 /* And fixup the flags on the single remaining edge. */
2749 te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
2750 te->flags &= ~EDGE_ABNORMAL;
2751 te->flags |= EDGE_FALLTHRU;
2752 if (te->probability > REG_BR_PROB_BASE)
2753 te->probability = REG_BR_PROB_BASE;
2758 /* Ensure there is nothing else to do. */
2759 gcc_assert (!all || has_zero_uses (lhs));
2761 /* If we were able to propagate away all uses of LHS, then
2762 we can remove STMT. */
2763 if (all)
2764 remove_stmt_or_phi (stmt);
2768 /* STMT is either a PHI node (potentially a degenerate PHI node) or
2769 a statement that is a trivial copy or constant initialization.
2771    Attempt to eliminate STMT by propagating its RHS into all uses of
2772 its LHS. This may in turn set new bits in INTERESTING_NAMES
2773 for nodes we want to revisit later.
2775 All exit paths should clear INTERESTING_NAMES for the result
2776 of STMT. */
2778 static void
2779 eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
2781 tree lhs = get_lhs_or_phi_result (stmt);
2782 tree rhs;
2783 int version = SSA_NAME_VERSION (lhs);
2785 /* If the LHS of this statement or PHI has no uses, then we can
2786 just eliminate it. This can occur if, for example, the PHI
2787 was created by block duplication due to threading and its only
2788 use was in the conditional at the end of the block which was
2789 deleted. */
2790 if (has_zero_uses (lhs))
2792 bitmap_clear_bit (interesting_names, version);
2793 remove_stmt_or_phi (stmt);
2794 return;
2797 /* Get the RHS of the assignment or PHI node if the PHI is a
2798 degenerate. */
2799 rhs = get_rhs_or_phi_arg (stmt);
2800 if (!rhs)
2802 bitmap_clear_bit (interesting_names, version);
2803 return;
2806 propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
2808 /* Note that STMT may well have been deleted by now, so do
2809      not access it; instead use the saved version # to clear
2810      STMT's entry in the worklist.  */
2811 bitmap_clear_bit (interesting_names, version);
2814 /* The first phase in degenerate PHI elimination.
2816 Eliminate the degenerate PHIs in BB, then recurse on the
2817 dominator children of BB. */
2819 static void
2820 eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
2822 gimple_stmt_iterator gsi;
2823 basic_block son;
2825 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2827 gimple phi = gsi_stmt (gsi);
2829 eliminate_const_or_copy (phi, interesting_names);
2832 /* Recurse into the dominator children of BB. */
2833 for (son = first_dom_son (CDI_DOMINATORS, bb);
2834 son;
2835 son = next_dom_son (CDI_DOMINATORS, son))
2836 eliminate_degenerate_phis_1 (son, interesting_names);
2840 /* A very simple pass to eliminate degenerate PHI nodes from the
2841    IL.  This is meant to be fast enough to be run several
2842 times in the optimization pipeline.
2844 Certain optimizations, particularly those which duplicate blocks
2845 or remove edges from the CFG can create or expose PHIs which are
2846 trivial copies or constant initializations.
2848 While we could pick up these optimizations in DOM or with the
2849 combination of copy-prop and CCP, those solutions are far too
2850 heavy-weight for our needs.
2852 This implementation has two phases so that we can efficiently
2853 eliminate the first order degenerate PHIs and second order
2854 degenerate PHIs.
2856 The first phase performs a dominator walk to identify and eliminate
2857 the vast majority of the degenerate PHIs. When a degenerate PHI
2858 is identified and eliminated any affected statements or PHIs
2859 are put on a worklist.
2861 The second phase eliminates degenerate PHIs and trivial copies
2862 or constant initializations using the worklist. This is how we
2863 pick up the secondary optimization opportunities with minimal
2864 cost. */
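/* Illustrative example of the two orders:

     x_3 = PHI <y_1(2), y_1(4)>;    first order degenerate
     z_5 = PHI <x_3(6), y_1(7)>;    becomes degenerate (second order)
                                    once x_3 is replaced by y_1  */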
2866 static unsigned int
2867 eliminate_degenerate_phis (void)
2869 bitmap interesting_names;
2870 bitmap interesting_names1;
2872 /* Bitmap of blocks which need EH information updated. We can not
2873 update it on-the-fly as doing so invalidates the dominator tree. */
2874 need_eh_cleanup = BITMAP_ALLOC (NULL);
2876 /* INTERESTING_NAMES is effectively our worklist, indexed by
2877 SSA_NAME_VERSION.
2879 A set bit indicates that the statement or PHI node which
2880 defines the SSA_NAME should be (re)examined to determine if
2881 it has become a degenerate PHI or trivial const/copy propagation
2882 opportunity.
2884      Experiments have shown we generally get better compilation
2885 time behavior with bitmaps rather than sbitmaps. */
2886 interesting_names = BITMAP_ALLOC (NULL);
2887 interesting_names1 = BITMAP_ALLOC (NULL);
2889 calculate_dominance_info (CDI_DOMINATORS);
2890 cfg_altered = false;
2892 /* First phase. Eliminate degenerate PHIs via a dominator
2893 walk of the CFG.
2895 Experiments have indicated that we generally get better
2896 compile-time behavior by visiting blocks in the first
2897 phase in dominator order. Presumably this is because walking
2898 in dominator order leaves fewer PHIs for later examination
2899 by the worklist phase. */
2900 eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
2902 /* Second phase. Eliminate second order degenerate PHIs as well
2903 as trivial copies or constant initializations identified by
2904 the first phase or this phase. Basically we keep iterating
2905 until our set of INTERESTING_NAMEs is empty. */
2906 while (!bitmap_empty_p (interesting_names))
2908 unsigned int i;
2909 bitmap_iterator bi;
2911 /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
2912 changed during the loop. Copy it to another bitmap and
2913 use that. */
2914 bitmap_copy (interesting_names1, interesting_names);
2916 EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
2918 tree name = ssa_name (i);
2920 /* Ignore SSA_NAMEs that have been released because
2921 their defining statement was deleted (unreachable). */
2922 if (name)
2923 eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
2924 interesting_names);
2928 if (cfg_altered)
2929 free_dominance_info (CDI_DOMINATORS);
2931 /* Propagation of const and copies may make some EH edges dead. Purge
2932 such edges from the CFG as needed. */
2933 if (!bitmap_empty_p (need_eh_cleanup))
2935 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
2936 BITMAP_FREE (need_eh_cleanup);
2939 BITMAP_FREE (interesting_names);
2940 BITMAP_FREE (interesting_names1);
2941 return 0;
2944 struct gimple_opt_pass pass_phi_only_cprop =
2947 GIMPLE_PASS,
2948 "phicprop", /* name */
2949 gate_dominator, /* gate */
2950 eliminate_degenerate_phis, /* execute */
2951 NULL, /* sub */
2952 NULL, /* next */
2953 0, /* static_pass_number */
2954 TV_TREE_PHI_CPROP, /* tv_id */
2955 PROP_cfg | PROP_ssa, /* properties_required */
2956 0, /* properties_provided */
2957 0, /* properties_destroyed */
2958 0, /* todo_flags_start */
2959 TODO_cleanup_cfg
2960 | TODO_dump_func
2961 | TODO_ggc_collect
2962 | TODO_verify_ssa
2963 | TODO_verify_stmts
2964 | TODO_update_ssa /* todo_flags_finish */