1 /* SSA Dominator optimizations for trees
2 Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "ggc.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "errors.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "diagnostic.h"
37 #include "timevar.h"
38 #include "tree-dump.h"
39 #include "tree-flow.h"
40 #include "domwalk.h"
41 #include "real.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-propagate.h"
44 #include "langhooks.h"
46 /* This file implements optimizations on the dominator tree. */
48 /* Hash table with expressions made available during the renaming process.
49 When an assignment of the form X_i = EXPR is found, the statement is
50 stored in this table. If the same expression EXPR is later found on the
51 RHS of another statement, it is replaced with X_i (thus performing
52 global redundancy elimination). Similarly as we pass through conditionals
53 we record the conditional itself as having either a true or false value
54 in this table. */
55 static htab_t avail_exprs;
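/* Illustrative example (not part of the original source): given

     x_3 = a_1 + b_2;
     ...
     y_5 = a_1 + b_2;

   the first statement records the expression a_1 + b_2 with value x_3
   in AVAIL_EXPRS, so when the second statement is visited its RHS is
   found in the table and replaced with x_3.  */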
57 /* Stack of available expressions in AVAIL_EXPRs. Each block pushes any
58 expressions it enters into the hash table along with a marker entry
59 (null). When we finish processing the block, we pop off entries and
60 remove the expressions from the global hash table until we hit the
61 marker. */
62 static varray_type avail_exprs_stack;
64 /* Stack of trees used to restore the global currdefs to its original
65 state after completing optimization of a block and its dominator children.
67 An SSA_NAME indicates that the current definition of the underlying
68 variable should be set to the given SSA_NAME.
70 A _DECL node indicates that the underlying variable has no current
71 definition.
73 A NULL node is used to mark the last node associated with the
74 current block. */
75 varray_type block_defs_stack;
77 /* Stack of statements we need to rescan during finalization for newly
78 exposed variables.
80 Statement rescanning must occur after the current block's available
81 expressions are removed from AVAIL_EXPRS. Else we may change the
82 hash code for an expression and be unable to find/remove it from
83 AVAIL_EXPRS. */
84 varray_type stmts_to_rescan;
86 /* Structure for entries in the expression hash table.
88 This requires more memory for the hash table entries, but allows us
89 to avoid creating silly tree nodes and annotations for conditionals,
 90 and eliminates two global hash tables and two block-local varrays.
92 It also allows us to reduce the number of hash table lookups we
93 have to perform in lookup_avail_expr and finally it allows us to
94 significantly reduce the number of calls into the hashing routine
95 itself. */
97 struct expr_hash_elt
99 /* The value (lhs) of this expression. */
100 tree lhs;
102 /* The expression (rhs) we want to record. */
103 tree rhs;
105 /* The annotation if this element corresponds to a statement. */
106 stmt_ann_t ann;
108 /* The hash value for RHS/ann. */
109 hashval_t hash;
112 /* Table of constant values and copies indexed by SSA name. When the
113 renaming pass finds an assignment of a constant (X_i = C) or a copy
114 assignment from another SSA variable (X_i = Y_j), it creates a mapping
115 between X_i and the RHS in this table. This mapping is used later on,
116 when renaming uses of X_i. If an assignment to X_i is found in this
117 table, instead of using X_i, we use the RHS of the statement stored in
118 this table (thus performing very simplistic copy and constant
119 propagation). */
120 static varray_type const_and_copies;
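/* Illustrative example (not part of the original source): after seeing
   x_1 = 5 we record the mapping x_1 -> 5 in this table, so a later use
   such as y_2 = x_1 + 1 can be rewritten as y_2 = 5 + 1 and folded.  */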
122 /* Stack of dest,src pairs that need to be restored during finalization.
124 A NULL entry is used to mark the end of pairs which need to be
125 restored during finalization of this block. */
126 static varray_type const_and_copies_stack;
128 /* Bitmap of SSA_NAMEs known to have a nonzero value, even if we do not
129 know their exact value. */
130 static bitmap nonzero_vars;
132 /* Track whether or not we have changed the control flow graph. */
133 static bool cfg_altered;
135 /* Bitmap of blocks that have had EH statements cleaned. We should
136 remove their dead edges eventually. */
137 static bitmap need_eh_cleanup;
139 /* Statistics for dominator optimizations. */
140 struct opt_stats_d
142 long num_stmts;
143 long num_exprs_considered;
144 long num_re;
147 /* Value range propagation record. Each time we encounter a conditional
148 of the form SSA_NAME COND CONST we create a new vrp_element to record
149 how the condition affects the possible values SSA_NAME may have.
 151    Each record contains the condition tested (COND), and the range of
152 values the variable may legitimately have if COND is true. Note the
153 range of values may be a smaller range than COND specifies if we have
154 recorded other ranges for this variable. Each record also contains the
155 block in which the range was recorded for invalidation purposes.
157 Note that the current known range is computed lazily. This allows us
158 to avoid the overhead of computing ranges which are never queried.
160 When we encounter a conditional, we look for records which constrain
161 the SSA_NAME used in the condition. In some cases those records allow
162 us to determine the condition's result at compile time. In other cases
163 they may allow us to simplify the condition.
165 We also use value ranges to do things like transform signed div/mod
166 operations into unsigned div/mod or to simplify ABS_EXPRs.
168 Simple experiments have shown these optimizations to not be all that
169 useful on switch statements (much to my surprise). So switch statement
170 optimizations are not performed.
172 Note carefully we do not propagate information through each statement
 173    in the block.  I.e., if we know variable X has a value in the range
174 [0, 25] and we encounter Y = X + 1, we do not track a value range
175 for Y (which would be [1, 26] if we cared). Similarly we do not
176 constrain values as we encounter narrowing typecasts, etc. */
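/* Illustrative example (not part of the original source): on the THEN
   edge of "if (x_1 < 10)" a vrp_element is pushed recording that x_1 is
   constrained by x_1 < 10; a later conditional such as "if (x_1 > 20)"
   in a dominated block can then be resolved at compile time.  */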
178 struct vrp_element
180 /* The highest and lowest values the variable in COND may contain when
181 COND is true. Note this may not necessarily be the same values
182 tested by COND if the same variable was used in earlier conditionals.
184 Note this is computed lazily and thus can be NULL indicating that
185 the values have not been computed yet. */
186 tree low;
187 tree high;
189 /* The actual conditional we recorded. This is needed since we compute
190 ranges lazily. */
191 tree cond;
193 /* The basic block where this record was created. We use this to determine
194 when to remove records. */
195 basic_block bb;
198 static struct opt_stats_d opt_stats;
 200 /* A virtual array holding value range records for each variable,
 201    indexed by the variable's SSA version number.  */
202 static varray_type vrp_data;
204 /* Datastructure for block local data used during the dominator walk.
205 We maintain a stack of these as we recursively walk down the
206 dominator tree. */
208 struct dom_walk_block_data
 210   /* Array of SSA_NAMEs whose nonzero state needs to be restored
 211      during finalization.  */
212 varray_type nonzero_vars;
214 /* Array of variables which have their values constrained by operations
215 in this basic block. We use this during finalization to know
216 which variables need their VRP data updated. */
217 varray_type vrp_variables;
220 struct eq_expr_value
222 tree src;
223 tree dst;
226 /* Local functions. */
227 static void optimize_stmt (struct dom_walk_data *,
228 basic_block bb,
229 block_stmt_iterator);
230 static inline tree get_value_for (tree, varray_type table);
231 static inline void set_value_for (tree, tree, varray_type table);
232 static tree lookup_avail_expr (tree, bool);
233 static struct eq_expr_value get_eq_expr_value (tree, int,
234 basic_block, varray_type *);
235 static hashval_t avail_expr_hash (const void *);
236 static hashval_t real_avail_expr_hash (const void *);
237 static int avail_expr_eq (const void *, const void *);
238 static void htab_statistics (FILE *, htab_t);
239 static void record_cond (tree, tree);
240 static void record_dominating_conditions (tree);
241 static void record_const_or_copy (tree, tree);
242 static void record_equality (tree, tree);
243 static tree update_rhs_and_lookup_avail_expr (tree, tree, bool);
244 static tree simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *,
245 tree, int);
246 static tree simplify_cond_and_lookup_avail_expr (tree, stmt_ann_t, int);
247 static tree simplify_switch_and_lookup_avail_expr (tree, int);
248 static tree find_equivalent_equality_comparison (tree);
249 static void record_range (tree, basic_block, varray_type *);
250 static bool extract_range_from_cond (tree, tree *, tree *, int *);
251 static void record_equivalences_from_phis (struct dom_walk_data *, basic_block);
252 static void record_equivalences_from_incoming_edge (struct dom_walk_data *,
253 basic_block);
254 static bool eliminate_redundant_computations (struct dom_walk_data *,
255 tree, stmt_ann_t);
256 static void record_equivalences_from_stmt (tree, varray_type *,
257 int, stmt_ann_t);
258 static void thread_across_edge (struct dom_walk_data *, edge);
259 static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
260 static void dom_opt_initialize_block_local_data (struct dom_walk_data *,
261 basic_block, bool);
262 static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
263 static void cprop_into_phis (struct dom_walk_data *, basic_block);
264 static void remove_local_expressions_from_table (void);
265 static void restore_vars_to_original_value (void);
266 static void restore_currdefs_to_original_value (void);
267 static void register_definitions_for_stmt (tree);
268 static edge single_incoming_edge_ignoring_loop_edges (basic_block);
270 /* Local version of fold that doesn't introduce cruft. */
272 static tree
273 local_fold (tree t)
275 t = fold (t);
277 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR that
278 may have been added by fold, and "useless" type conversions that might
279 now be apparent due to propagation. */
280 STRIP_USELESS_TYPE_CONVERSION (t);
282 return t;
285 /* Return the value associated with variable VAR in TABLE. */
287 static inline tree
288 get_value_for (tree var, varray_type table)
290 return VARRAY_TREE (table, SSA_NAME_VERSION (var));
293 /* Associate VALUE to variable VAR in TABLE. */
295 static inline void
296 set_value_for (tree var, tree value, varray_type table)
298 VARRAY_TREE (table, SSA_NAME_VERSION (var)) = value;
301 /* Jump threading, redundancy elimination and const/copy propagation.
303 This pass may expose new symbols that need to be renamed into SSA. For
304 every new symbol exposed, its corresponding bit will be set in
305 VARS_TO_RENAME. */
307 static void
308 tree_ssa_dominator_optimize (void)
310 struct dom_walk_data walk_data;
311 unsigned int i;
313 for (i = 0; i < num_referenced_vars; i++)
314 var_ann (referenced_var (i))->current_def = NULL;
316 /* Mark loop edges so we avoid threading across loop boundaries.
 317      This may result in transforming a natural loop into an irreducible
 318      region.  */
319 mark_dfs_back_edges ();
321 /* Create our hash tables. */
322 avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
323 VARRAY_TREE_INIT (avail_exprs_stack, 20, "Available expression stack");
324 VARRAY_TREE_INIT (block_defs_stack, 20, "Block DEFS stack");
325 VARRAY_TREE_INIT (const_and_copies, num_ssa_names, "const_and_copies");
326 VARRAY_TREE_INIT (const_and_copies_stack, 20, "Block const_and_copies stack");
327 nonzero_vars = BITMAP_XMALLOC ();
328 VARRAY_GENERIC_PTR_INIT (vrp_data, num_ssa_names, "vrp_data");
329 need_eh_cleanup = BITMAP_XMALLOC ();
330 VARRAY_TREE_INIT (stmts_to_rescan, 20, "Statements to rescan");
332 /* Setup callbacks for the generic dominator tree walker. */
333 walk_data.walk_stmts_backward = false;
334 walk_data.dom_direction = CDI_DOMINATORS;
335 walk_data.initialize_block_local_data = dom_opt_initialize_block_local_data;
336 walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
337 walk_data.before_dom_children_walk_stmts = optimize_stmt;
338 walk_data.before_dom_children_after_stmts = cprop_into_phis;
339 walk_data.after_dom_children_before_stmts = NULL;
340 walk_data.after_dom_children_walk_stmts = NULL;
341 walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
342 /* Right now we only attach a dummy COND_EXPR to the global data pointer.
343 When we attach more stuff we'll need to fill this out with a real
344 structure. */
345 walk_data.global_data = NULL;
346 walk_data.block_local_data_size = sizeof (struct dom_walk_block_data);
348 /* Now initialize the dominator walker. */
349 init_walk_dominator_tree (&walk_data);
351 calculate_dominance_info (CDI_DOMINATORS);
353 /* If we prove certain blocks are unreachable, then we want to
354 repeat the dominator optimization process as PHI nodes may
355 have turned into copies which allows better propagation of
356 values. So we repeat until we do not identify any new unreachable
357 blocks. */
360 /* Optimize the dominator tree. */
361 cfg_altered = false;
363 /* Recursively walk the dominator tree optimizing statements. */
364 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
366 /* If we exposed any new variables, go ahead and put them into
367 SSA form now, before we handle jump threading. This simplifies
368 interactions between rewriting of _DECL nodes into SSA form
369 and rewriting SSA_NAME nodes into SSA form after block
370 duplication and CFG manipulation. */
371 if (bitmap_first_set_bit (vars_to_rename) >= 0)
373 rewrite_into_ssa (false);
374 bitmap_clear (vars_to_rename);
377 /* Thread jumps, creating duplicate blocks as needed. */
378 cfg_altered = thread_through_all_blocks ();
380 /* Removal of statements may make some EH edges dead. Purge
381 such edges from the CFG as needed. */
382 if (bitmap_first_set_bit (need_eh_cleanup) >= 0)
384 cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
385 bitmap_zero (need_eh_cleanup);
388 free_dominance_info (CDI_DOMINATORS);
389 cfg_altered = cleanup_tree_cfg ();
390 calculate_dominance_info (CDI_DOMINATORS);
392 rewrite_ssa_into_ssa ();
394 if (VARRAY_ACTIVE_SIZE (const_and_copies) <= num_ssa_names)
396 VARRAY_GROW (const_and_copies, num_ssa_names);
397 VARRAY_GROW (vrp_data, num_ssa_names);
400 /* Reinitialize the various tables. */
401 bitmap_clear (nonzero_vars);
402 htab_empty (avail_exprs);
403 VARRAY_CLEAR (const_and_copies);
404 VARRAY_CLEAR (vrp_data);
406 for (i = 0; i < num_referenced_vars; i++)
407 var_ann (referenced_var (i))->current_def = NULL;
409 while (cfg_altered);
411 /* Debugging dumps. */
412 if (dump_file && (dump_flags & TDF_STATS))
413 dump_dominator_optimization_stats (dump_file);
415 /* We emptied the hash table earlier, now delete it completely. */
416 htab_delete (avail_exprs);
418 /* It is not necessary to clear CURRDEFS, REDIRECTION_EDGES, VRP_DATA,
419 CONST_AND_COPIES, and NONZERO_VARS as they all get cleared at the bottom
420 of the do-while loop above. */
422 /* And finalize the dominator walker. */
423 fini_walk_dominator_tree (&walk_data);
425 /* Free nonzero_vars. */
426 BITMAP_XFREE (nonzero_vars);
427 BITMAP_XFREE (need_eh_cleanup);
430 static bool
431 gate_dominator (void)
433 return flag_tree_dom != 0;
436 struct tree_opt_pass pass_dominator =
438 "dom", /* name */
439 gate_dominator, /* gate */
440 tree_ssa_dominator_optimize, /* execute */
441 NULL, /* sub */
442 NULL, /* next */
443 0, /* static_pass_number */
444 TV_TREE_SSA_DOMINATOR_OPTS, /* tv_id */
445 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
446 0, /* properties_provided */
447 0, /* properties_destroyed */
448 0, /* todo_flags_start */
449 TODO_dump_func | TODO_rename_vars
450 | TODO_verify_ssa, /* todo_flags_finish */
451 0 /* letter */
455 /* We are exiting BB, see if the target block begins with a conditional
456 jump which has a known value when reached via BB. */
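/* Illustrative example (not part of the original source): if we know
   x_1 is 0 on edge E (via CONST_AND_COPIES), and E->dest contains only
   copies/no-op assignments followed by "if (x_1 == 0)", then the taken
   edge is known; it is stored in E->aux so thread_through_all_blocks
   can later redirect E past the conditional.  */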
458 static void
459 thread_across_edge (struct dom_walk_data *walk_data, edge e)
461 block_stmt_iterator bsi;
462 tree stmt = NULL;
463 tree phi;
465 /* Each PHI creates a temporary equivalence, record them. */
466 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
468 tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
469 tree dst = PHI_RESULT (phi);
470 record_const_or_copy (dst, src);
471 register_new_def (dst, &block_defs_stack);
474 for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
476 tree lhs, cached_lhs;
478 stmt = bsi_stmt (bsi);
480 /* Ignore empty statements and labels. */
481 if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
482 continue;
484 /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new
485 value, then stop our search here. Ideally when we stop a
486 search we stop on a COND_EXPR or SWITCH_EXPR. */
487 if (TREE_CODE (stmt) != MODIFY_EXPR
488 || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
489 break;
491 /* At this point we have a statement which assigns an RHS to an
492 SSA_VAR on the LHS. We want to prove that the RHS is already
493 available and that its value is held in the current definition
494 of the LHS -- meaning that this assignment is a NOP when
495 reached via edge E. */
496 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
497 cached_lhs = TREE_OPERAND (stmt, 1);
498 else
499 cached_lhs = lookup_avail_expr (stmt, false);
501 lhs = TREE_OPERAND (stmt, 0);
503 /* This can happen if we thread around to the start of a loop. */
504 if (lhs == cached_lhs)
505 break;
507 /* If we did not find RHS in the hash table, then try again after
508 temporarily const/copy propagating the operands. */
509 if (!cached_lhs)
511 /* Copy the operands. */
512 stmt_ann_t ann = stmt_ann (stmt);
513 use_optype uses = USE_OPS (ann);
514 vuse_optype vuses = VUSE_OPS (ann);
515 tree *uses_copy = xcalloc (NUM_USES (uses), sizeof (tree));
516 tree *vuses_copy = xcalloc (NUM_VUSES (vuses), sizeof (tree));
517 unsigned int i;
519 /* Make a copy of the uses into USES_COPY, then cprop into
520 the use operands. */
521 for (i = 0; i < NUM_USES (uses); i++)
523 tree tmp = NULL;
525 uses_copy[i] = USE_OP (uses, i);
526 if (TREE_CODE (USE_OP (uses, i)) == SSA_NAME)
527 tmp = get_value_for (USE_OP (uses, i), const_and_copies);
528 if (tmp)
529 SET_USE_OP (uses, i, tmp);
532 /* Similarly for virtual uses. */
533 for (i = 0; i < NUM_VUSES (vuses); i++)
535 tree tmp = NULL;
537 vuses_copy[i] = VUSE_OP (vuses, i);
538 if (TREE_CODE (VUSE_OP (vuses, i)) == SSA_NAME)
539 tmp = get_value_for (VUSE_OP (vuses, i), const_and_copies);
540 if (tmp)
541 SET_VUSE_OP (vuses, i, tmp);
544 /* Try to lookup the new expression. */
545 cached_lhs = lookup_avail_expr (stmt, false);
547 /* Restore the statement's original uses/defs. */
548 for (i = 0; i < NUM_USES (uses); i++)
549 SET_USE_OP (uses, i, uses_copy[i]);
551 for (i = 0; i < NUM_VUSES (vuses); i++)
552 SET_VUSE_OP (vuses, i, vuses_copy[i]);
554 free (uses_copy);
555 free (vuses_copy);
557 /* If we still did not find the expression in the hash table,
558 then we can not ignore this statement. */
559 if (! cached_lhs)
560 break;
563 /* If the expression in the hash table was not assigned to an
564 SSA_NAME, then we can not ignore this statement. */
565 if (TREE_CODE (cached_lhs) != SSA_NAME)
566 break;
568 /* If we have different underlying variables, then we can not
569 ignore this statement. */
570 if (SSA_NAME_VAR (cached_lhs) != SSA_NAME_VAR (lhs))
571 break;
 573       /* If CACHED_LHS does not represent the current value of the underlying
574 variable in CACHED_LHS/LHS, then we can not ignore this statement. */
575 if (var_ann (SSA_NAME_VAR (lhs))->current_def != cached_lhs)
576 break;
578 /* If we got here, then we can ignore this statement and continue
579 walking through the statements in the block looking for a threadable
580 COND_EXPR.
 582	 We want to record an equivalence lhs = cached_lhs so that if
583 the result of this statement is used later we can copy propagate
584 suitably. */
585 record_const_or_copy (lhs, cached_lhs);
586 register_new_def (lhs, &block_defs_stack);
589 /* If we stopped at a COND_EXPR or SWITCH_EXPR, then see if we know which
590 arm will be taken. */
591 if (stmt
592 && (TREE_CODE (stmt) == COND_EXPR
593 || TREE_CODE (stmt) == SWITCH_EXPR))
595 tree cond, cached_lhs;
596 edge e1;
597 edge_iterator ei;
 599       /* Do not forward entry edges into the loop.  If the loop has
 600	  multiple entry edges we may end up constructing an irreducible
 601	  region.
602 ??? We may consider forwarding the edges in the case all incoming
603 edges forward to the same destination block. */
 604       if ((e->flags & EDGE_DFS_BACK) == 0)
606 FOR_EACH_EDGE (e1, ei, e->dest->preds)
608 if (e1->flags & EDGE_DFS_BACK)
609 break;
611 if (e1)
612 return;
615 /* Now temporarily cprop the operands and try to find the resulting
616 expression in the hash tables. */
617 if (TREE_CODE (stmt) == COND_EXPR)
618 cond = COND_EXPR_COND (stmt);
619 else
620 cond = SWITCH_COND (stmt);
622 if (COMPARISON_CLASS_P (cond))
624 tree dummy_cond, op0, op1;
625 enum tree_code cond_code;
627 op0 = TREE_OPERAND (cond, 0);
628 op1 = TREE_OPERAND (cond, 1);
629 cond_code = TREE_CODE (cond);
631 /* Get the current value of both operands. */
632 if (TREE_CODE (op0) == SSA_NAME)
634 tree tmp = get_value_for (op0, const_and_copies);
635 if (tmp)
636 op0 = tmp;
639 if (TREE_CODE (op1) == SSA_NAME)
641 tree tmp = get_value_for (op1, const_and_copies);
642 if (tmp)
643 op1 = tmp;
646 /* Stuff the operator and operands into our dummy conditional
647 expression, creating the dummy conditional if necessary. */
648 dummy_cond = walk_data->global_data;
649 if (! dummy_cond)
651 dummy_cond = build (cond_code, boolean_type_node, op0, op1);
652 dummy_cond = build (COND_EXPR, void_type_node,
653 dummy_cond, NULL, NULL);
654 walk_data->global_data = dummy_cond;
656 else
658 TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), cond_code);
659 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op0;
660 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1) = op1;
663 /* If the conditional folds to an invariant, then we are done,
664 otherwise look it up in the hash tables. */
665 cached_lhs = local_fold (COND_EXPR_COND (dummy_cond));
666 if (! is_gimple_min_invariant (cached_lhs))
667 cached_lhs = lookup_avail_expr (dummy_cond, false);
668 if (!cached_lhs || ! is_gimple_min_invariant (cached_lhs))
670 cached_lhs = simplify_cond_and_lookup_avail_expr (dummy_cond,
671 NULL,
672 false);
675 /* We can have conditionals which just test the state of a
676 variable rather than use a relational operator. These are
677 simpler to handle. */
678 else if (TREE_CODE (cond) == SSA_NAME)
680 cached_lhs = cond;
681 cached_lhs = get_value_for (cached_lhs, const_and_copies);
682 if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
683 cached_lhs = 0;
685 else
686 cached_lhs = lookup_avail_expr (stmt, false);
688 if (cached_lhs)
690 edge taken_edge = find_taken_edge (e->dest, cached_lhs);
691 basic_block dest = (taken_edge ? taken_edge->dest : NULL);
693 if (dest == e->dest)
694 return;
696 /* If we have a known destination for the conditional, then
697 we can perform this optimization, which saves at least one
698 conditional jump each time it applies since we get to
699 bypass the conditional at our original destination. */
700 if (dest)
702 e->aux = taken_edge;
703 bb_ann (e->dest)->incoming_edge_threaded = true;
710 /* Initialize the local stacks.
712 AVAIL_EXPRS stores all the expressions made available in this block.
714 CONST_AND_COPIES stores var/value pairs to restore at the end of this
715 block.
717 NONZERO_VARS stores the vars which have a nonzero value made in this
718 block.
720 STMTS_TO_RESCAN is a list of statements we will rescan for operands.
722 VRP_VARIABLES is the list of variables which have had their values
723 constrained by an operation in this block.
725 These stacks are cleared in the finalization routine run for each
726 block. */
728 static void
729 dom_opt_initialize_block_local_data (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
730 basic_block bb ATTRIBUTE_UNUSED,
731 bool recycled ATTRIBUTE_UNUSED)
733 struct dom_walk_block_data *bd
734 = (struct dom_walk_block_data *)VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
736 /* We get cleared memory from the allocator, so if the memory is not
737 cleared, then we are re-using a previously allocated entry. In
738 that case, we can also re-use the underlying virtual arrays. Just
739 make sure we clear them before using them! */
740 if (recycled)
742 gcc_assert (!bd->nonzero_vars
743 || VARRAY_ACTIVE_SIZE (bd->nonzero_vars) == 0);
744 gcc_assert (!bd->vrp_variables
745 || VARRAY_ACTIVE_SIZE (bd->vrp_variables) == 0);
749 /* Initialize local stacks for this optimizer and record equivalences
750 upon entry to BB. Equivalences can come from the edge traversed to
751 reach BB or they may come from PHI nodes at the start of BB. */
753 static void
754 dom_opt_initialize_block (struct dom_walk_data *walk_data, basic_block bb)
756 if (dump_file && (dump_flags & TDF_DETAILS))
757 fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);
759 /* Push a marker on the stacks of local information so that we know how
760 far to unwind when we finalize this block. */
761 VARRAY_PUSH_TREE (avail_exprs_stack, NULL_TREE);
762 VARRAY_PUSH_TREE (block_defs_stack, NULL_TREE);
763 VARRAY_PUSH_TREE (const_and_copies_stack, NULL_TREE);
765 record_equivalences_from_incoming_edge (walk_data, bb);
767 /* PHI nodes can create equivalences too. */
768 record_equivalences_from_phis (walk_data, bb);
771 /* Given an expression EXPR (a relational expression or a statement),
 772    initialize the hash table element pointed to by ELEMENT.  */
774 static void
775 initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
777 /* Hash table elements may be based on conditional expressions or statements.
779 For the former case, we have no annotation and we want to hash the
780 conditional expression. In the latter case we have an annotation and
781 we want to record the expression the statement evaluates. */
782 if (COMPARISON_CLASS_P (expr) || TREE_CODE (expr) == TRUTH_NOT_EXPR)
784 element->ann = NULL;
785 element->rhs = expr;
787 else if (TREE_CODE (expr) == COND_EXPR)
789 element->ann = stmt_ann (expr);
790 element->rhs = COND_EXPR_COND (expr);
792 else if (TREE_CODE (expr) == SWITCH_EXPR)
794 element->ann = stmt_ann (expr);
795 element->rhs = SWITCH_COND (expr);
797 else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
799 element->ann = stmt_ann (expr);
800 element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1);
802 else
804 element->ann = stmt_ann (expr);
805 element->rhs = TREE_OPERAND (expr, 1);
808 element->lhs = lhs;
809 element->hash = avail_expr_hash (element);
 812 /* Pop entries off AVAIL_EXPRS_STACK, removing each expression from
 813    the global AVAIL_EXPRS table, until we hit this block's marker.  */
815 static void
816 remove_local_expressions_from_table (void)
818 /* Remove all the expressions made available in this block. */
819 while (VARRAY_ACTIVE_SIZE (avail_exprs_stack) > 0)
821 struct expr_hash_elt element;
822 tree expr = VARRAY_TOP_TREE (avail_exprs_stack);
823 VARRAY_POP (avail_exprs_stack);
825 if (expr == NULL_TREE)
826 break;
828 initialize_hash_element (expr, NULL, &element);
829 htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
833 /* Use the SSA_NAMES in LOCALS to restore TABLE to its original
 834    state, stopping when there are LIMIT entries left in LOCALS.  */
836 static void
837 restore_nonzero_vars_to_original_value (varray_type locals,
838 unsigned limit,
839 bitmap table)
841 if (!locals)
842 return;
844 while (VARRAY_ACTIVE_SIZE (locals) > limit)
846 tree name = VARRAY_TOP_TREE (locals);
847 VARRAY_POP (locals);
848 bitmap_clear_bit (table, SSA_NAME_VERSION (name));
852 /* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
853 CONST_AND_COPIES to its original state, stopping when we hit a
854 NULL marker. */
856 static void
857 restore_vars_to_original_value (void)
859 while (VARRAY_ACTIVE_SIZE (const_and_copies_stack) > 0)
861 tree prev_value, dest;
863 dest = VARRAY_TOP_TREE (const_and_copies_stack);
864 VARRAY_POP (const_and_copies_stack);
866 if (dest == NULL)
867 break;
869 prev_value = VARRAY_TOP_TREE (const_and_copies_stack);
870 VARRAY_POP (const_and_copies_stack);
872 set_value_for (dest, prev_value, const_and_copies);
876 /* Similar to restore_vars_to_original_value, except that it restores
877 CURRDEFS to its original value. */
878 static void
879 restore_currdefs_to_original_value (void)
881 /* Restore CURRDEFS to its original state. */
882 while (VARRAY_ACTIVE_SIZE (block_defs_stack) > 0)
884 tree tmp = VARRAY_TOP_TREE (block_defs_stack);
885 tree saved_def, var;
887 VARRAY_POP (block_defs_stack);
889 if (tmp == NULL_TREE)
890 break;
892 /* If we recorded an SSA_NAME, then make the SSA_NAME the current
893 definition of its underlying variable. If we recorded anything
 894	 else, it must have been a _DECL node and its current reaching
895 definition must have been NULL. */
896 if (TREE_CODE (tmp) == SSA_NAME)
898 saved_def = tmp;
899 var = SSA_NAME_VAR (saved_def);
901 else
903 saved_def = NULL;
904 var = tmp;
907 var_ann (var)->current_def = saved_def;
911 /* We have finished processing the dominator children of BB, perform
912 any finalization actions in preparation for leaving this node in
913 the dominator tree. */
915 static void
916 dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
918 struct dom_walk_block_data *bd
919 = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
920 tree last;
922 /* If we are at a leaf node in the dominator graph, see if we can thread
923 the edge from BB through its successor.
925 Do this before we remove entries from our equivalence tables. */
926 if (EDGE_COUNT (bb->succs) == 1
927 && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
928 && (get_immediate_dominator (CDI_DOMINATORS, EDGE_SUCC (bb, 0)->dest) != bb
929 || phi_nodes (EDGE_SUCC (bb, 0)->dest)))
932 thread_across_edge (walk_data, EDGE_SUCC (bb, 0));
934 else if ((last = last_stmt (bb))
935 && TREE_CODE (last) == COND_EXPR
936 && (COMPARISON_CLASS_P (COND_EXPR_COND (last))
937 || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
938 && EDGE_COUNT (bb->succs) == 2
939 && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
940 && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
942 edge true_edge, false_edge;
943 tree cond, inverted = NULL;
944 enum tree_code cond_code;
946 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
948 cond = COND_EXPR_COND (last);
949 cond_code = TREE_CODE (cond);
951 if (TREE_CODE_CLASS (cond_code) == tcc_comparison)
952 inverted = invert_truthvalue (cond);
954 /* If the THEN arm is the end of a dominator tree or has PHI nodes,
955 then try to thread through its edge. */
956 if (get_immediate_dominator (CDI_DOMINATORS, true_edge->dest) != bb
957 || phi_nodes (true_edge->dest))
959 /* Push a marker onto the available expression stack so that we
960 unwind any expressions related to the TRUE arm before processing
961 the false arm below. */
962 VARRAY_PUSH_TREE (avail_exprs_stack, NULL_TREE);
963 VARRAY_PUSH_TREE (block_defs_stack, NULL_TREE);
964 VARRAY_PUSH_TREE (const_and_copies_stack, NULL_TREE);
966 /* Record any equivalences created by following this edge. */
967 if (TREE_CODE_CLASS (cond_code) == tcc_comparison)
969 record_cond (cond, boolean_true_node);
970 record_dominating_conditions (cond);
971 record_cond (inverted, boolean_false_node);
973 else if (cond_code == SSA_NAME)
974 record_const_or_copy (cond, boolean_true_node);
976 /* Now thread the edge. */
977 thread_across_edge (walk_data, true_edge);
979 /* And restore the various tables to their state before
980 we threaded this edge. */
981 remove_local_expressions_from_table ();
982 restore_vars_to_original_value ();
983 restore_currdefs_to_original_value ();
986 /* Similarly for the ELSE arm. */
987 if (get_immediate_dominator (CDI_DOMINATORS, false_edge->dest) != bb
988 || phi_nodes (false_edge->dest))
990 /* Record any equivalences created by following this edge. */
991 if (TREE_CODE_CLASS (cond_code) == tcc_comparison)
993 record_cond (cond, boolean_false_node);
994 record_cond (inverted, boolean_true_node);
995 record_dominating_conditions (inverted);
997 else if (cond_code == SSA_NAME)
998 record_const_or_copy (cond, boolean_false_node);
1000 thread_across_edge (walk_data, false_edge);
1002 /* No need to remove local expressions from our tables
1003 or restore vars to their original value as that will
1004 be done immediately below. */
1008 remove_local_expressions_from_table ();
1009 restore_nonzero_vars_to_original_value (bd->nonzero_vars, 0, nonzero_vars);
1010 restore_vars_to_original_value ();
1011 restore_currdefs_to_original_value ();
1013 /* Remove VRP records associated with this basic block. They are no
1014 longer valid.
1016 To be efficient, we note which variables have had their values
1017 constrained in this block. So walk over each variable in the
 1018      VRP_VARIABLES array.  */
1019 while (bd->vrp_variables && VARRAY_ACTIVE_SIZE (bd->vrp_variables) > 0)
1021 tree var = VARRAY_TOP_TREE (bd->vrp_variables);
1023 /* Each variable has a stack of value range records. We want to
1024 invalidate those associated with our basic block. So we walk
1025 the array backwards popping off records associated with our
1026 block. Once we hit a record not associated with our block
1027 we are done. */
1028 varray_type var_vrp_records = VARRAY_GENERIC_PTR (vrp_data,
1029 SSA_NAME_VERSION (var));
1031 while (VARRAY_ACTIVE_SIZE (var_vrp_records) > 0)
1033 struct vrp_element *element
1034 = (struct vrp_element *)VARRAY_TOP_GENERIC_PTR (var_vrp_records);
1036 if (element->bb != bb)
1037 break;
1039 VARRAY_POP (var_vrp_records);
1042 VARRAY_POP (bd->vrp_variables);
1045 /* If we queued any statements to rescan in this block, then
1046 go ahead and rescan them now. */
1047 while (VARRAY_ACTIVE_SIZE (stmts_to_rescan) > 0)
1049 tree stmt = VARRAY_TOP_TREE (stmts_to_rescan);
1050 basic_block stmt_bb = bb_for_stmt (stmt);
1052 if (stmt_bb != bb)
1053 break;
1055 VARRAY_POP (stmts_to_rescan);
1056 mark_new_vars_to_rename (stmt, vars_to_rename);
1060 /* PHI nodes can create equivalences too.
1062 Ignoring any alternatives which are the same as the result, if
1063 all the alternatives are equal, then the PHI node creates an
1064 equivalence.
1066 Additionally, if all the PHI alternatives are known to have a nonzero
1067 value, then the result of this PHI is known to have a nonzero value,
1068 even if we do not know its exact value. */
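/* Illustrative example (not part of the original source): for the PHI
   x_4 = PHI <y_2 (bb3), y_2 (bb5)> every argument is the same SSA_NAME,
   so x_4 is equivalent to y_2 and the copy is entered into
   CONST_AND_COPIES.  */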
1070 static void
1071 record_equivalences_from_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
1072 basic_block bb)
1074 tree phi;
1076 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1078 tree lhs = PHI_RESULT (phi);
1079 tree rhs = NULL;
1080 int i;
1082 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1084 tree t = PHI_ARG_DEF (phi, i);
1086 if (TREE_CODE (t) == SSA_NAME || is_gimple_min_invariant (t))
1088 /* Ignore alternatives which are the same as our LHS. */
1089 if (operand_equal_p (lhs, t, 0))
1090 continue;
1092 /* If we have not processed an alternative yet, then set
1093 RHS to this alternative. */
1094 if (rhs == NULL)
1095 rhs = t;
1096 /* If we have processed an alternative (stored in RHS), then
1097 see if it is equal to this one. If it isn't, then stop
1098 the search. */
1099 else if (! operand_equal_p (rhs, t, 0))
1100 break;
1102 else
1103 break;
1106 /* If we had no interesting alternatives, then all the RHS alternatives
1107 must have been the same as LHS. */
1108 if (!rhs)
1109 rhs = lhs;
1111 /* If we managed to iterate through each PHI alternative without
1112 breaking out of the loop, then we have a PHI which may create
1113 a useful equivalence. We do not need to record unwind data for
1114 this, since this is a true assignment and not an equivalence
1115 inferred from a comparison. All uses of this ssa name are dominated
1116 by this assignment, so unwinding just costs time and space. */
1117 if (i == PHI_NUM_ARGS (phi)
1118 && may_propagate_copy (lhs, rhs))
1119 set_value_for (lhs, rhs, const_and_copies);
1121 /* Now see if we know anything about the nonzero property for the
1122 result of this PHI. */
1123 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1125 if (!PHI_ARG_NONZERO (phi, i))
1126 break;
1129 if (i == PHI_NUM_ARGS (phi))
1130 bitmap_set_bit (nonzero_vars, SSA_NAME_VERSION (PHI_RESULT (phi)));
1132 register_new_def (lhs, &block_defs_stack);
1136 /* Ignoring loop backedges, if BB has precisely one incoming edge then
1137 return that edge. Otherwise return NULL. */
1138 static edge
1139 single_incoming_edge_ignoring_loop_edges (basic_block bb)
1141 edge retval = NULL;
1142 edge e;
1143 edge_iterator ei;
1145 FOR_EACH_EDGE (e, ei, bb->preds)
1147 /* A loop back edge can be identified by the destination of
1148 the edge dominating the source of the edge. */
1149 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
1150 continue;
1152 /* If we have already seen a non-loop edge, then we must have
1153 multiple incoming non-loop edges and thus we return NULL. */
1154 if (retval)
1155 return NULL;
1157 /* This is the first non-loop incoming edge we have found. Record
1158 it. */
1159 retval = e;
1162 return retval;
1165 /* Record any equivalences created by the incoming edge to BB. If BB
1166 has more than one incoming edge, then no equivalence is created. */
1168 static void
1169 record_equivalences_from_incoming_edge (struct dom_walk_data *walk_data,
1170 basic_block bb)
1172 int edge_flags;
1173 basic_block parent;
1174 struct eq_expr_value eq_expr_value;
1175 tree parent_block_last_stmt = NULL;
1176 struct dom_walk_block_data *bd
1177 = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
 1179   /* If our parent block ended with a control statement, then we may be
1180 able to record some equivalences based on which outgoing edge from
1181 the parent was followed. */
1182 parent = get_immediate_dominator (CDI_DOMINATORS, bb);
1183 if (parent)
1185 parent_block_last_stmt = last_stmt (parent);
1186 if (parent_block_last_stmt && !is_ctrl_stmt (parent_block_last_stmt))
1187 parent_block_last_stmt = NULL;
1190 eq_expr_value.src = NULL;
1191 eq_expr_value.dst = NULL;
1193 /* If we have a single predecessor (ignoring loop backedges), then extract
1194 EDGE_FLAGS from the single incoming edge. Otherwise just return as
1195 there is nothing to do. */
1196 if (EDGE_COUNT (bb->preds) >= 1
1197 && parent_block_last_stmt)
1199 edge e = single_incoming_edge_ignoring_loop_edges (bb);
1200 if (e && bb_for_stmt (parent_block_last_stmt) == e->src)
1201 edge_flags = e->flags;
1202 else
1203 return;
1205 else
1206 return;
1208 /* If our parent block ended in a COND_EXPR, add any equivalences
1209 created by the COND_EXPR to the hash table and initialize
1210 EQ_EXPR_VALUE appropriately.
1212 EQ_EXPR_VALUE is an assignment expression created when BB's immediate
1213 dominator ends in a COND_EXPR statement whose predicate is of the form
1214 'VAR == VALUE', where VALUE may be another variable or a constant.
1215 This is used to propagate VALUE on the THEN_CLAUSE of that
1216 conditional. This assignment is inserted in CONST_AND_COPIES so that
1217 the copy and constant propagator can find more propagation
1218 opportunities. */
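  /* Illustrative example (not part of the original source): if the
     dominator ends in "if (a_3 == 7)" and BB is reached via the true
     edge, then EQ_EXPR_VALUE becomes dst = a_3, src = 7, and the call
     to record_equality below lets uses of a_3 in BB be replaced by 7.  */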
1219 if (TREE_CODE (parent_block_last_stmt) == COND_EXPR
1220 && (edge_flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
1221 eq_expr_value = get_eq_expr_value (parent_block_last_stmt,
1222 (edge_flags & EDGE_TRUE_VALUE) != 0,
1224 &bd->vrp_variables);
1225 /* Similarly when the parent block ended in a SWITCH_EXPR.
1226 We can only know the value of the switch's condition if the dominator
1227 parent is also the only predecessor of this block. */
1228 else if (EDGE_PRED (bb, 0)->src == parent
1229 && TREE_CODE (parent_block_last_stmt) == SWITCH_EXPR)
1231 tree switch_cond = SWITCH_COND (parent_block_last_stmt);
1233 /* If the switch's condition is an SSA variable, then we may
1234 know its value at each of the case labels. */
1235 if (TREE_CODE (switch_cond) == SSA_NAME)
1237 tree switch_vec = SWITCH_LABELS (parent_block_last_stmt);
1238 size_t i, n = TREE_VEC_LENGTH (switch_vec);
1239 int case_count = 0;
1240 tree match_case = NULL_TREE;
1242 /* Search the case labels for those whose destination is
1243 the current basic block. */
1244 for (i = 0; i < n; ++i)
1246 tree elt = TREE_VEC_ELT (switch_vec, i);
1247 if (label_to_block (CASE_LABEL (elt)) == bb)
1249 if (++case_count > 1 || CASE_HIGH (elt))
1250 break;
1251 match_case = elt;
1255 /* If we encountered precisely one CASE_LABEL_EXPR and it
1256 was not the default case, or a case range, then we know
1257 the exact value of SWITCH_COND which caused us to get to
1258 this block. Record that equivalence in EQ_EXPR_VALUE. */
1259 if (case_count == 1
1260 && match_case
1261 && CASE_LOW (match_case)
1262 && !CASE_HIGH (match_case))
1264 eq_expr_value.dst = switch_cond;
1265 eq_expr_value.src = fold_convert (TREE_TYPE (switch_cond),
1266 CASE_LOW (match_case));
1271 /* If EQ_EXPR_VALUE (VAR == VALUE) is given, register the VALUE as a
1272 new value for VAR, so that occurrences of VAR can be replaced with
1273 VALUE while re-writing the THEN arm of a COND_EXPR. */
1274 if (eq_expr_value.src && eq_expr_value.dst)
1275 record_equality (eq_expr_value.dst, eq_expr_value.src);
1278 /* Dump SSA statistics on FILE. */
1280 void
1281 dump_dominator_optimization_stats (FILE *file)
1283 long n_exprs;
1285 fprintf (file, "Total number of statements: %6ld\n\n",
1286 opt_stats.num_stmts);
1287 fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
1288 opt_stats.num_exprs_considered);
1290 n_exprs = opt_stats.num_exprs_considered;
1291 if (n_exprs == 0)
1292 n_exprs = 1;
1294 fprintf (file, " Redundant expressions eliminated: %6ld (%.0f%%)\n",
1295 opt_stats.num_re, PERCENT (opt_stats.num_re,
1296 n_exprs));
1298 fprintf (file, "\nHash table statistics:\n");
1300 fprintf (file, " avail_exprs: ");
1301 htab_statistics (file, avail_exprs);
1305 /* Dump SSA statistics on stderr. */
1307 void
1308 debug_dominator_optimization_stats (void)
1310 dump_dominator_optimization_stats (stderr);
1314 /* Dump statistics for the hash table HTAB. */
1316 static void
1317 htab_statistics (FILE *file, htab_t htab)
1319 fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
1320 (long) htab_size (htab),
1321 (long) htab_elements (htab),
1322 htab_collisions (htab));
1325 /* Record the fact that VAR has a nonzero value, though we may not know
1326 its exact value. Note that if VAR is already known to have a nonzero
1327 value, then we do nothing. */
1329 static void
1330 record_var_is_nonzero (tree var, varray_type *block_nonzero_vars_p)
1332 int indx = SSA_NAME_VERSION (var);
1334 if (bitmap_bit_p (nonzero_vars, indx))
1335 return;
1337 /* Mark it in the global table. */
1338 bitmap_set_bit (nonzero_vars, indx);
1340 /* Record this SSA_NAME so that we can reset the global table
1341 when we leave this block. */
1342 if (! *block_nonzero_vars_p)
1343 VARRAY_TREE_INIT (*block_nonzero_vars_p, 2, "block_nonzero_vars");
1344 VARRAY_PUSH_TREE (*block_nonzero_vars_p, var);
1347 /* Enter a statement into the true/false expression hash table indicating
1348 that the condition COND has the value VALUE. */
1350 static void
1351 record_cond (tree cond, tree value)
1353 struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));
1354 void **slot;
1356 initialize_hash_element (cond, value, element);
1358 slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
1359 element->hash, true);
1360 if (*slot == NULL)
1362 *slot = (void *) element;
1363 VARRAY_PUSH_TREE (avail_exprs_stack, cond);
1365 else
1366 free (element);
1369 /* COND is a condition which is known to be true. Record variants of
1370 COND which must also be true.
1372 For example, if a < b is true, then a <= b must also be true. */
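/* Illustrative example (not part of the original source): when "a < b"
   is known to be true, the switch below also records "a <= b", "a != b",
   "ORDERED (a, b)" and "LTGT (a, b)" as true in the hash table.  */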
1374 static void
1375 record_dominating_conditions (tree cond)
1377 switch (TREE_CODE (cond))
1379 case LT_EXPR:
1380 record_cond (build2 (LE_EXPR, boolean_type_node,
1381 TREE_OPERAND (cond, 0),
1382 TREE_OPERAND (cond, 1)),
1383 boolean_true_node);
1384 record_cond (build2 (ORDERED_EXPR, boolean_type_node,
1385 TREE_OPERAND (cond, 0),
1386 TREE_OPERAND (cond, 1)),
1387 boolean_true_node);
1388 record_cond (build2 (NE_EXPR, boolean_type_node,
1389 TREE_OPERAND (cond, 0),
1390 TREE_OPERAND (cond, 1)),
1391 boolean_true_node);
1392 record_cond (build2 (LTGT_EXPR, boolean_type_node,
1393 TREE_OPERAND (cond, 0),
1394 TREE_OPERAND (cond, 1)),
1395 boolean_true_node);
1396 break;
1398 case GT_EXPR:
1399 record_cond (build2 (GE_EXPR, boolean_type_node,
1400 TREE_OPERAND (cond, 0),
1401 TREE_OPERAND (cond, 1)),
1402 boolean_true_node);
1403 record_cond (build2 (ORDERED_EXPR, boolean_type_node,
1404 TREE_OPERAND (cond, 0),
1405 TREE_OPERAND (cond, 1)),
1406 boolean_true_node);
1407 record_cond (build2 (NE_EXPR, boolean_type_node,
1408 TREE_OPERAND (cond, 0),
1409 TREE_OPERAND (cond, 1)),
1410 boolean_true_node);
1411 record_cond (build2 (LTGT_EXPR, boolean_type_node,
1412 TREE_OPERAND (cond, 0),
1413 TREE_OPERAND (cond, 1)),
1414 boolean_true_node);
1415 break;
1417 case GE_EXPR:
1418 case LE_EXPR:
1419 record_cond (build2 (ORDERED_EXPR, boolean_type_node,
1420 TREE_OPERAND (cond, 0),
1421 TREE_OPERAND (cond, 1)),
1422 boolean_true_node);
1423 break;
1425 case EQ_EXPR:
1426 record_cond (build2 (ORDERED_EXPR, boolean_type_node,
1427 TREE_OPERAND (cond, 0),
1428 TREE_OPERAND (cond, 1)),
1429 boolean_true_node);
1430 record_cond (build2 (LE_EXPR, boolean_type_node,
1431 TREE_OPERAND (cond, 0),
1432 TREE_OPERAND (cond, 1)),
1433 boolean_true_node);
1434 record_cond (build2 (GE_EXPR, boolean_type_node,
1435 TREE_OPERAND (cond, 0),
1436 TREE_OPERAND (cond, 1)),
1437 boolean_true_node);
1438 break;
1440 case UNORDERED_EXPR:
1441 record_cond (build2 (NE_EXPR, boolean_type_node,
1442 TREE_OPERAND (cond, 0),
1443 TREE_OPERAND (cond, 1)),
1444 boolean_true_node);
1445 record_cond (build2 (UNLE_EXPR, boolean_type_node,
1446 TREE_OPERAND (cond, 0),
1447 TREE_OPERAND (cond, 1)),
1448 boolean_true_node);
1449 record_cond (build2 (UNGE_EXPR, boolean_type_node,
1450 TREE_OPERAND (cond, 0),
1451 TREE_OPERAND (cond, 1)),
1452 boolean_true_node);
1453 record_cond (build2 (UNEQ_EXPR, boolean_type_node,
1454 TREE_OPERAND (cond, 0),
1455 TREE_OPERAND (cond, 1)),
1456 boolean_true_node);
1457 record_cond (build2 (UNLT_EXPR, boolean_type_node,
1458 TREE_OPERAND (cond, 0),
1459 TREE_OPERAND (cond, 1)),
1460 boolean_true_node);
1461 record_cond (build2 (UNGT_EXPR, boolean_type_node,
1462 TREE_OPERAND (cond, 0),
1463 TREE_OPERAND (cond, 1)),
1464 boolean_true_node);
1465 break;
1467 case UNLT_EXPR:
1468 record_cond (build2 (UNLE_EXPR, boolean_type_node,
1469 TREE_OPERAND (cond, 0),
1470 TREE_OPERAND (cond, 1)),
1471 boolean_true_node);
1472 record_cond (build2 (NE_EXPR, boolean_type_node,
1473 TREE_OPERAND (cond, 0),
1474 TREE_OPERAND (cond, 1)),
1475 boolean_true_node);
1476 break;
1478 case UNGT_EXPR:
1479 record_cond (build2 (UNGE_EXPR, boolean_type_node,
1480 TREE_OPERAND (cond, 0),
1481 TREE_OPERAND (cond, 1)),
1482 boolean_true_node);
1483 record_cond (build2 (NE_EXPR, boolean_type_node,
1484 TREE_OPERAND (cond, 0),
1485 TREE_OPERAND (cond, 1)),
1486 boolean_true_node);
1487 break;
1489 case UNEQ_EXPR:
1490 record_cond (build2 (UNLE_EXPR, boolean_type_node,
1491 TREE_OPERAND (cond, 0),
1492 TREE_OPERAND (cond, 1)),
1493 boolean_true_node);
1494 record_cond (build2 (UNGE_EXPR, boolean_type_node,
1495 TREE_OPERAND (cond, 0),
1496 TREE_OPERAND (cond, 1)),
1497 boolean_true_node);
1498 break;
1500 case LTGT_EXPR:
1501 record_cond (build2 (NE_EXPR, boolean_type_node,
1502 TREE_OPERAND (cond, 0),
1503 TREE_OPERAND (cond, 1)),
1504 boolean_true_node);
1505 record_cond (build2 (ORDERED_EXPR, boolean_type_node,
1506 TREE_OPERAND (cond, 0),
1507 TREE_OPERAND (cond, 1)),
1508 boolean_true_node);
1510 default:
1511 break;
1515 /* A helper function for record_const_or_copy and record_equality.
1516 Do the work of recording the value and undo info. */
1518 static void
1519 record_const_or_copy_1 (tree x, tree y, tree prev_x)
1521 set_value_for (x, y, const_and_copies);
1523 VARRAY_PUSH_TREE (const_and_copies_stack, prev_x);
1524 VARRAY_PUSH_TREE (const_and_copies_stack, x);
1527 /* Record that X is equal to Y in const_and_copies. Record undo
1528 information in the block-local varray. */
1530 static void
1531 record_const_or_copy (tree x, tree y)
1533 tree prev_x = get_value_for (x, const_and_copies);
1535 if (TREE_CODE (y) == SSA_NAME)
1537 tree tmp = get_value_for (y, const_and_copies);
1538 if (tmp)
1539 y = tmp;
1542 record_const_or_copy_1 (x, y, prev_x);
1545 /* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
1546 This constrains the cases in which we may treat this as assignment. */
1548 static void
1549 record_equality (tree x, tree y)
1551 tree prev_x = NULL, prev_y = NULL;
1553 if (TREE_CODE (x) == SSA_NAME)
1554 prev_x = get_value_for (x, const_and_copies);
1555 if (TREE_CODE (y) == SSA_NAME)
1556 prev_y = get_value_for (y, const_and_copies);
1558 /* If one of the previous values is invariant, then use that.
1559 Otherwise it doesn't matter which value we choose, just so
1560 long as we canonicalize on one value. */
1561 if (TREE_INVARIANT (y))
1563 else if (TREE_INVARIANT (x))
1564 prev_x = x, x = y, y = prev_x, prev_x = prev_y;
1565 else if (prev_x && TREE_INVARIANT (prev_x))
1566 x = y, y = prev_x, prev_x = prev_y;
1567 else if (prev_y)
1568 y = prev_y;
1570 /* After the swapping, we must have one SSA_NAME. */
1571 if (TREE_CODE (x) != SSA_NAME)
1572 return;
1574 /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
1575 variable compared against zero. If we're honoring signed zeros,
1576 then we cannot record this value unless we know that the value is
1577 nonzero. */
1578 if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
1579 && (TREE_CODE (y) != REAL_CST
1580 || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
1581 return;
1583 record_const_or_copy_1 (x, y, prev_x);
1586 /* STMT is a MODIFY_EXPR for which we were unable to find RHS in the
1587 hash tables. Try to simplify the RHS using whatever equivalences
1588 we may have recorded.
1590 If we are able to simplify the RHS, then lookup the simplified form in
1591 the hash table and return the result. Otherwise return NULL. */
1593 static tree
1594 simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *walk_data,
1595 tree stmt, int insert)
1597 tree rhs = TREE_OPERAND (stmt, 1);
1598 enum tree_code rhs_code = TREE_CODE (rhs);
1599 tree result = NULL;
1601 /* If we have lhs = ~x, look and see if we earlier had x = ~y.
1602 In which case we can change this statement to be lhs = y.
1603 Which can then be copy propagated.
1605 Similarly for negation. */
1606 if ((rhs_code == BIT_NOT_EXPR || rhs_code == NEGATE_EXPR)
1607 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1609 /* Get the definition statement for our RHS. */
1610 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
1612 /* See if the RHS_DEF_STMT has the same form as our statement. */
1613 if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR
1614 && TREE_CODE (TREE_OPERAND (rhs_def_stmt, 1)) == rhs_code)
1616 tree rhs_def_operand;
1618 rhs_def_operand = TREE_OPERAND (TREE_OPERAND (rhs_def_stmt, 1), 0);
1620 /* Verify that RHS_DEF_OPERAND is a suitable SSA variable. */
1621 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1622 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1623 result = update_rhs_and_lookup_avail_expr (stmt,
1624 rhs_def_operand,
1625 insert);
1629 /* If we have z = (x OP C1), see if we earlier had x = y OP C2.
1630 If OP is associative, create and fold (y OP C2) OP C1 which
1631 should result in (y OP C3), use that as the RHS for the
1632 assignment. Add minus to this, as we handle it specially below. */
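  /* Illustrative example (not part of the original source): given
     x_2 = y_1 + 7 followed by z_3 = x_2 + 3, folding the constants
     lets us rewrite the second statement as z_3 = y_1 + 10 before
     performing the hash table lookup.  */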
1633 if ((associative_tree_code (rhs_code) || rhs_code == MINUS_EXPR)
1634 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
1635 && is_gimple_min_invariant (TREE_OPERAND (rhs, 1)))
1637 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
1639 /* See if the RHS_DEF_STMT has the same form as our statement. */
1640 if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR)
1642 tree rhs_def_rhs = TREE_OPERAND (rhs_def_stmt, 1);
1643 enum tree_code rhs_def_code = TREE_CODE (rhs_def_rhs);
1645 if (rhs_code == rhs_def_code
1646 || (rhs_code == PLUS_EXPR && rhs_def_code == MINUS_EXPR)
1647 || (rhs_code == MINUS_EXPR && rhs_def_code == PLUS_EXPR))
1649 tree def_stmt_op0 = TREE_OPERAND (rhs_def_rhs, 0);
1650 tree def_stmt_op1 = TREE_OPERAND (rhs_def_rhs, 1);
1652 if (TREE_CODE (def_stmt_op0) == SSA_NAME
1653 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_stmt_op0)
1654 && is_gimple_min_invariant (def_stmt_op1))
1656 tree outer_const = TREE_OPERAND (rhs, 1);
1657 tree type = TREE_TYPE (TREE_OPERAND (stmt, 0));
1658 tree t;
1660 /* If we care about correct floating point results, then
1661 don't fold x + c1 - c2. Note that we need to take both
1662 the codes and the signs to figure this out. */
1663 if (FLOAT_TYPE_P (type)
1664 && !flag_unsafe_math_optimizations
1665 && (rhs_def_code == PLUS_EXPR
1666 || rhs_def_code == MINUS_EXPR))
1668 bool neg = false;
1670 neg ^= (rhs_code == MINUS_EXPR);
1671 neg ^= (rhs_def_code == MINUS_EXPR);
1672 neg ^= real_isneg (TREE_REAL_CST_PTR (outer_const));
1673 neg ^= real_isneg (TREE_REAL_CST_PTR (def_stmt_op1));
1675 if (neg)
1676 goto dont_fold_assoc;
1679 /* Ho hum. So fold will only operate on the outermost
1680 thingy that we give it, so we have to build the new
1681 expression in two pieces. This requires that we handle
1682 combinations of plus and minus. */
1683 if (rhs_def_code != rhs_code)
1685 if (rhs_def_code == MINUS_EXPR)
1686 t = build (MINUS_EXPR, type, outer_const, def_stmt_op1);
1687 else
1688 t = build (MINUS_EXPR, type, def_stmt_op1, outer_const);
1689 rhs_code = PLUS_EXPR;
1691 else if (rhs_def_code == MINUS_EXPR)
1692 t = build (PLUS_EXPR, type, def_stmt_op1, outer_const);
1693 else
1694 t = build (rhs_def_code, type, def_stmt_op1, outer_const);
1695 t = local_fold (t);
1696 t = build (rhs_code, type, def_stmt_op0, t);
1697 t = local_fold (t);
1699 /* If the result is a suitable looking gimple expression,
1700 then use it instead of the original for STMT. */
1701 if (TREE_CODE (t) == SSA_NAME
1702 || (UNARY_CLASS_P (t)
1703 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
1704 || ((BINARY_CLASS_P (t) || COMPARISON_CLASS_P (t))
1705 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
1706 && is_gimple_val (TREE_OPERAND (t, 1))))
1707 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1711 dont_fold_assoc:;
1714 /* Transform TRUNC_DIV_EXPR and TRUNC_MOD_EXPR into RSHIFT_EXPR
1715 and BIT_AND_EXPR respectively if the first operand is greater
1716 than zero and the second operand is an exact power of two. */
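  /* Illustrative example (not part of the original source): if x_1 is
     known to be nonnegative (or its type is unsigned), then x_1 / 8 can
     be rewritten as x_1 >> 3 and x_1 % 8 as x_1 & 7.  */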
1717 if ((rhs_code == TRUNC_DIV_EXPR || rhs_code == TRUNC_MOD_EXPR)
1718 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
1719 && integer_pow2p (TREE_OPERAND (rhs, 1)))
1721 tree val;
1722 tree op = TREE_OPERAND (rhs, 0);
1724 if (TYPE_UNSIGNED (TREE_TYPE (op)))
1726 val = integer_one_node;
1728 else
1730 tree dummy_cond = walk_data->global_data;
1732 if (! dummy_cond)
1734 dummy_cond = build (GT_EXPR, boolean_type_node,
1735 op, integer_zero_node);
1736 dummy_cond = build (COND_EXPR, void_type_node,
1737 dummy_cond, NULL, NULL);
1738 walk_data->global_data = dummy_cond;
1740 else
1742 TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), GT_EXPR);
1743 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
1744 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
1745 = integer_zero_node;
1747 val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
1750 if (val && integer_onep (val))
1752 tree t;
1753 tree op0 = TREE_OPERAND (rhs, 0);
1754 tree op1 = TREE_OPERAND (rhs, 1);
1756 if (rhs_code == TRUNC_DIV_EXPR)
1757 t = build (RSHIFT_EXPR, TREE_TYPE (op0), op0,
1758 build_int_cst (NULL_TREE, tree_log2 (op1)));
1759 else
1760 t = build (BIT_AND_EXPR, TREE_TYPE (op0), op0,
1761 local_fold (build (MINUS_EXPR, TREE_TYPE (op1),
1762 op1, integer_one_node)));
1764 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1768 /* Transform ABS (X) into X or -X as appropriate. */
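/* For example (hypothetical names and ranges): if dominating
   conditionals prove X_3 <= 0, ABS (X_3) is rewritten as -X_3; if they
   prove X_3 >= 0, it is rewritten as plain X_3.  */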
1769 if (rhs_code == ABS_EXPR
1770 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0))))
1772 tree val;
1773 tree op = TREE_OPERAND (rhs, 0);
1774 tree type = TREE_TYPE (op);
1776 if (TYPE_UNSIGNED (type))
1778 val = integer_zero_node;
1780 else
1782 tree dummy_cond = walk_data->global_data;
1784 if (! dummy_cond)
1786 dummy_cond = build (LE_EXPR, boolean_type_node,
1787 op, integer_zero_node);
1788 dummy_cond = build (COND_EXPR, void_type_node,
1789 dummy_cond, NULL, NULL);
1790 walk_data->global_data = dummy_cond;
1792 else
1794 TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), LE_EXPR);
1795 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
1796 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
1797 = build_int_cst (type, 0);
1799 val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
1801 if (!val)
1803 TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), GE_EXPR);
1804 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
1805 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
1806 = build_int_cst (type, 0);
1808 val = simplify_cond_and_lookup_avail_expr (dummy_cond,
1809 NULL, false);
1811 if (val)
1813 if (integer_zerop (val))
1814 val = integer_one_node;
1815 else if (integer_onep (val))
1816 val = integer_zero_node;
1821 if (val
1822 && (integer_onep (val) || integer_zerop (val)))
1824 tree t;
1826 if (integer_onep (val))
1827 t = build1 (NEGATE_EXPR, TREE_TYPE (op), op);
1828 else
1829 t = op;
1831 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1835 /* Optimize *"foo" into 'f'. This is done here rather than
1836 in fold to avoid problems with stuff like &*"foo". */
1837 if (TREE_CODE (rhs) == INDIRECT_REF || TREE_CODE (rhs) == ARRAY_REF)
1839 tree t = fold_read_from_constant_string (rhs);
1841 if (t)
1842 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1845 return result;
1848 /* COND is a condition of the form:
1850 x == const or x != const
1852 Look back to x's defining statement and see if x is defined as
1854 x = (type) y;
1856 If const is unchanged if we convert it to type, then we can build
1857 the equivalent expression:
1860 y == const or y != const
1862 Which may allow further optimizations.
1864 Return the equivalent comparison or NULL if no such equivalent comparison
1865 was found. */
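/* A hypothetical example:

     x_1 = (int) y_2;         where y_2 has type char
     if (x_1 == 32) ...

   The constant 32 is unchanged by conversion to char, so the comparison
   may be rewritten as y_2 == 32, which may already have a known value
   in the available expression table.  */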
1867 static tree
1868 find_equivalent_equality_comparison (tree cond)
1870 tree op0 = TREE_OPERAND (cond, 0);
1871 tree op1 = TREE_OPERAND (cond, 1);
1872 tree def_stmt = SSA_NAME_DEF_STMT (op0);
1874 /* OP0 might have been a parameter, so first make sure it
1875 was defined by a MODIFY_EXPR. */
1876 if (def_stmt && TREE_CODE (def_stmt) == MODIFY_EXPR)
1878 tree def_rhs = TREE_OPERAND (def_stmt, 1);
1880 /* Now make sure the RHS of the MODIFY_EXPR is a typecast. */
1881 if ((TREE_CODE (def_rhs) == NOP_EXPR
1882 || TREE_CODE (def_rhs) == CONVERT_EXPR)
1883 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME)
1885 tree def_rhs_inner = TREE_OPERAND (def_rhs, 0);
1886 tree def_rhs_inner_type = TREE_TYPE (def_rhs_inner);
1887 tree new;
1889 if (TYPE_PRECISION (def_rhs_inner_type)
1890 > TYPE_PRECISION (TREE_TYPE (def_rhs)))
1891 return NULL;
1893 /* What we want to prove is that if we convert OP1 to
1894 the type of the object inside the NOP_EXPR, then the
1895 result is still equivalent to OP1.
1897 If that is true, then build and return a new equivalent
1898 condition which uses the source of the typecast and the
1899 new constant (which has only changed its type). */
1900 new = build1 (TREE_CODE (def_rhs), def_rhs_inner_type, op1);
1901 new = local_fold (new);
1902 if (is_gimple_val (new) && tree_int_cst_equal (new, op1))
1903 return build (TREE_CODE (cond), TREE_TYPE (cond),
1904 def_rhs_inner, new);
1907 return NULL;
1910 /* STMT is a COND_EXPR for which we could not trivially determine its
1911 result. This routine attempts to find equivalent forms of the
1912 condition which we may be able to optimize better. It also
1913 uses simple value range propagation to optimize conditionals. */
1915 static tree
1916 simplify_cond_and_lookup_avail_expr (tree stmt,
1917 stmt_ann_t ann,
1918 int insert)
1920 tree cond = COND_EXPR_COND (stmt);
1922 if (COMPARISON_CLASS_P (cond))
1924 tree op0 = TREE_OPERAND (cond, 0);
1925 tree op1 = TREE_OPERAND (cond, 1);
1927 if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
1929 int limit;
1930 tree low, high, cond_low, cond_high;
1931 int lowequal, highequal, swapped, no_overlap, subset, cond_inverted;
1932 varray_type vrp_records;
1933 struct vrp_element *element;
1935 /* First see if we have test of an SSA_NAME against a constant
1936 where the SSA_NAME is defined by an earlier typecast which
1937 is irrelevant when performing tests against the given
1938 constant. */
1939 if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1941 tree new_cond = find_equivalent_equality_comparison (cond);
1943 if (new_cond)
1945 /* Update the statement to use the new equivalent
1946 condition. */
1947 COND_EXPR_COND (stmt) = new_cond;
1949 /* If this is not a real stmt, ann will be NULL and we
1950 avoid processing the operands. */
1951 if (ann)
1952 modify_stmt (stmt);
1954 /* Lookup the condition and return its known value if it
1955 exists. */
1956 new_cond = lookup_avail_expr (stmt, insert);
1957 if (new_cond)
1958 return new_cond;
1960 /* The operands have changed, so update op0 and op1. */
1961 op0 = TREE_OPERAND (cond, 0);
1962 op1 = TREE_OPERAND (cond, 1);
1966 /* Consult the value range records for this variable (if they exist)
1967 to see if we can eliminate or simplify this conditional.
1969 Note two tests are necessary to determine no records exist.
1970 First we have to see if the virtual array exists; if it
1971 does, then we have to check its active size.
1973 Also note the vast majority of conditionals are not testing
1974 a variable which has had its range constrained by an earlier
1975 conditional. So this filter avoids a lot of unnecessary work. */
1976 vrp_records = VARRAY_GENERIC_PTR (vrp_data, SSA_NAME_VERSION (op0));
1977 if (vrp_records == NULL)
1978 return NULL;
1980 limit = VARRAY_ACTIVE_SIZE (vrp_records);
1982 /* If we have no value range records for this variable, or we are
1983 unable to extract a range for this condition, then there is
1984 nothing to do. */
1985 if (limit == 0
1986 || ! extract_range_from_cond (cond, &cond_high,
1987 &cond_low, &cond_inverted))
1988 return NULL;
1990 /* We really want to avoid unnecessary computations of range
1991 info. So all ranges are computed lazily; this avoids a
1992 lot of unnecessary work. ie, we record the conditional,
1993 but do not process how it constrains the variable's
1994 potential values until we know that processing the condition
1995 could be helpful.
1997 However, we do not want to have to walk a potentially long
1998 list of ranges, nor do we want to compute a variable's
1999 range more than once for a given path.
2001 Luckily, each time we encounter a conditional that can not
2002 be otherwise optimized we will end up here and we will
2003 compute the necessary range information for the variable
2004 used in this condition.
2006 Thus you can conclude that there will never be more than one
2007 conditional associated with a variable which has not been
2008 processed. So we never need to merge more than one new
2009 conditional into the current range.
2011 These properties also help us avoid unnecessary work. */
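/* A hypothetical walk down one dominator path: an earlier X_4 > 10
   records the range [11, TYPE_MAX]; a dominated X_4 > 20 merges that
   to [21, TYPE_MAX]; a still deeper X_4 == 5 then has no overlap with
   the merged range and is folded to false below.  */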
2012 element
2013 = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records, limit - 1);
2015 if (element->high && element->low)
2017 /* The last element has been processed, so there is no range
2018 merging to do, we can simply use the high/low values
2019 recorded in the last element. */
2020 low = element->low;
2021 high = element->high;
2023 else
2025 tree tmp_high, tmp_low;
2026 int dummy;
2028 /* The last element has not been processed. Process it now. */
2029 extract_range_from_cond (element->cond, &tmp_high,
2030 &tmp_low, &dummy);
2032 /* If this is the only element, then no merging is necessary,
2033 the high/low values from extract_range_from_cond are all
2034 we need. */
2035 if (limit == 1)
2037 low = tmp_low;
2038 high = tmp_high;
2040 else
2042 /* Get the high/low value from the previous element. */
2043 struct vrp_element *prev
2044 = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records,
2045 limit - 2);
2046 low = prev->low;
2047 high = prev->high;
2049 /* Merge in this element's range with the range from the
2050 previous element.
2052 The low value for the merged range is the maximum of
2053 the previous low value and the low value of this record.
2055 Similarly the high value for the merged range is the
2056 minimum of the previous high value and the high value of
2057 this record. */
2058 low = (tree_int_cst_compare (low, tmp_low) == 1
2059 ? low : tmp_low);
2060 high = (tree_int_cst_compare (high, tmp_high) == -1
2061 ? high : tmp_high);
2064 /* And record the computed range. */
2065 element->low = low;
2066 element->high = high;
2070 /* After we have constrained this variable's potential values,
2071 we try to determine the result of the given conditional.
2073 To simplify later tests, first determine if the current
2074 low value is the same low value as the conditional.
2075 Similarly for the current high value and the high value
2076 for the conditional. */
2077 lowequal = tree_int_cst_equal (low, cond_low);
2078 highequal = tree_int_cst_equal (high, cond_high);
2080 if (lowequal && highequal)
2081 return (cond_inverted ? boolean_false_node : boolean_true_node);
2083 /* To simplify the overlap/subset tests below we may want
2084 to swap the two ranges so that the larger of the two
2085 ranges occurs "first". */
2086 swapped = 0;
2087 if (tree_int_cst_compare (low, cond_low) == 1
2088 || (lowequal
2089 && tree_int_cst_compare (cond_high, high) == 1))
2091 tree temp;
2093 swapped = 1;
2094 temp = low;
2095 low = cond_low;
2096 cond_low = temp;
2097 temp = high;
2098 high = cond_high;
2099 cond_high = temp;
2102 /* Now determine if there is no overlap in the ranges
2103 or if the second range is a subset of the first range. */
2104 no_overlap = tree_int_cst_lt (high, cond_low);
2105 subset = tree_int_cst_compare (cond_high, high) != 1;
2107 /* If there was no overlap in the ranges, then this conditional
2108 always has a false value (unless we had to invert this
2109 conditional, in which case it always has a true value). */
2110 if (no_overlap)
2111 return (cond_inverted ? boolean_true_node : boolean_false_node);
2113 /* If the current range is a subset of the condition's range,
2114 then this conditional always has a true value (unless we
2115 had to invert this conditional, in which case it always
2116 has a false value). */
2117 if (subset && swapped)
2118 return (cond_inverted ? boolean_false_node : boolean_true_node);
2120 /* We were unable to determine the result of the conditional.
2121 However, we may be able to simplify the conditional. First
2122 merge the ranges in the same manner as range merging above. */
2123 low = tree_int_cst_compare (low, cond_low) == 1 ? low : cond_low;
2124 high = tree_int_cst_compare (high, cond_high) == -1 ? high : cond_high;
2126 /* If the range has converged to a single point, then turn this
2127 into an equality comparison. */
2128 if (TREE_CODE (cond) != EQ_EXPR
2129 && TREE_CODE (cond) != NE_EXPR
2130 && tree_int_cst_equal (low, high))
2132 TREE_SET_CODE (cond, EQ_EXPR);
2133 TREE_OPERAND (cond, 1) = high;
2137 return 0;
2140 /* STMT is a SWITCH_EXPR for which we could not trivially determine its
2141 result. This routine attempts to find equivalent forms of the
2142 condition which we may be able to optimize better. */
2144 static tree
2145 simplify_switch_and_lookup_avail_expr (tree stmt, int insert)
2147 tree cond = SWITCH_COND (stmt);
2148 tree def, to, ti;
2150 /* The optimization that we really care about is removing unnecessary
2151 casts. That will let us do much better in propagating the inferred
2152 constant at the switch target. */
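/* A minimal sketch (hypothetical names):

     unsigned char c_5;
     D.1234_6 = (int) c_5;
     switch (D.1234_6) ...

   The widening cast preserves every value of C_5, so the switch can
   use C_5 directly and the cast may become dead code.  */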
2153 if (TREE_CODE (cond) == SSA_NAME)
2155 def = SSA_NAME_DEF_STMT (cond);
2156 if (TREE_CODE (def) == MODIFY_EXPR)
2158 def = TREE_OPERAND (def, 1);
2159 if (TREE_CODE (def) == NOP_EXPR)
2161 int need_precision;
2162 bool fail;
2164 def = TREE_OPERAND (def, 0);
2166 #ifdef ENABLE_CHECKING
2167 /* ??? Why was Jeff testing this? We are gimple... */
2168 gcc_assert (is_gimple_val (def));
2169 #endif
2171 to = TREE_TYPE (cond);
2172 ti = TREE_TYPE (def);
2174 /* If we have an extension that preserves value, then we
2175 can copy the source value into the switch. */
2177 need_precision = TYPE_PRECISION (ti);
2178 fail = false;
2179 if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
2180 fail = true;
2181 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
2182 need_precision += 1;
2183 if (TYPE_PRECISION (to) < need_precision)
2184 fail = true;
2186 if (!fail)
2188 SWITCH_COND (stmt) = def;
2189 modify_stmt (stmt);
2191 return lookup_avail_expr (stmt, insert);
2197 return 0;
2201 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2202 known value for that SSA_NAME (or NULL if no value is known).
2204 NONZERO_VARS is the set SSA_NAMES known to have a nonzero value,
2205 even if we don't know their precise value.
2207 Propagate values from CONST_AND_COPIES and NONZERO_VARS into the PHI
2208 nodes of the successors of BB. */
2210 static void
2211 cprop_into_successor_phis (basic_block bb,
2212 varray_type const_and_copies,
2213 bitmap nonzero_vars)
2215 edge e;
2216 edge_iterator ei;
2218 /* This can get rather expensive if the implementation is naive in
2219 how it finds the phi alternative associated with a particular edge. */
2221 FOR_EACH_EDGE (e, ei, bb->succs)
2223 tree phi;
2224 int phi_num_args;
2225 int hint;
2227 /* If this is an abnormal edge, then we do not want to copy propagate
2228 into the PHI alternative associated with this edge. */
2229 if (e->flags & EDGE_ABNORMAL)
2230 continue;
2232 phi = phi_nodes (e->dest);
2233 if (! phi)
2234 continue;
2236 /* There is no guarantee that for any two PHI nodes in a block
2237 the phi alternative associated with a particular edge will be
2238 at the same index in the phi alternative array.
2240 However, it is very likely they will be the same. So we keep
2241 track of the index of the alternative where we found the edge in
2242 the previous phi node and check that index first in the next
2243 phi node. If that hint fails, then we actually search all
2244 the entries. */
2245 phi_num_args = PHI_NUM_ARGS (phi);
2246 hint = phi_num_args;
2247 for ( ; phi; phi = PHI_CHAIN (phi))
2249 int i;
2250 tree new;
2251 use_operand_p orig_p;
2252 tree orig;
2254 /* If the hint is valid (!= phi_num_args), see if it points
2255 us to the desired phi alternative. */
2256 if (hint != phi_num_args && PHI_ARG_EDGE (phi, hint) == e)
2258 else
2260 /* The hint was either invalid or did not point to the
2261 correct phi alternative. Search all the alternatives
2262 for the correct one. Update the hint. */
2263 for (i = 0; i < phi_num_args; i++)
2264 if (PHI_ARG_EDGE (phi, i) == e)
2265 break;
2266 hint = i;
2269 /* If we did not find the proper alternative, then something is
2270 horribly wrong. */
2271 gcc_assert (hint != phi_num_args);
2273 /* The alternative may be associated with a constant, so verify
2274 it is an SSA_NAME before doing anything with it. */
2275 orig_p = PHI_ARG_DEF_PTR (phi, hint);
2276 orig = USE_FROM_PTR (orig_p);
2277 if (TREE_CODE (orig) != SSA_NAME)
2278 continue;
2280 /* If the alternative is known to have a nonzero value, record
2281 that fact in the PHI node itself for future use. */
2282 if (bitmap_bit_p (nonzero_vars, SSA_NAME_VERSION (orig)))
2283 PHI_ARG_NONZERO (phi, hint) = true;
2285 /* If we have *ORIG_P in our constant/copy table, then replace
2286 ORIG_P with its value in our constant/copy table. */
2287 new = VARRAY_TREE (const_and_copies, SSA_NAME_VERSION (orig));
2288 if (new
2289 && (TREE_CODE (new) == SSA_NAME
2290 || is_gimple_min_invariant (new))
2291 && may_propagate_copy (orig, new))
2293 propagate_value (orig_p, new);
2300 /* Propagate known constants/copies into PHI nodes of BB's successor
2301 blocks. */
2303 static void
2304 cprop_into_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
2305 basic_block bb)
2307 cprop_into_successor_phis (bb, const_and_copies, nonzero_vars);
2310 /* Search for redundant computations in STMT. If any are found, then
2311 replace them with the variable holding the result of the computation.
2313 If safe, record this expression into the available expression hash
2314 table. */
2316 static bool
2317 eliminate_redundant_computations (struct dom_walk_data *walk_data,
2318 tree stmt, stmt_ann_t ann)
2320 v_may_def_optype v_may_defs = V_MAY_DEF_OPS (ann);
2321 tree *expr_p, def = NULL_TREE;
2322 bool insert = true;
2323 tree cached_lhs;
2324 bool retval = false;
2326 if (TREE_CODE (stmt) == MODIFY_EXPR)
2327 def = TREE_OPERAND (stmt, 0);
2329 /* Certain expressions on the RHS can be optimized away, but can not
2330 themselves be entered into the hash tables. */
2331 if (ann->makes_aliased_stores
2332 || ! def
2333 || TREE_CODE (def) != SSA_NAME
2334 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
2335 || NUM_V_MAY_DEFS (v_may_defs) != 0)
2336 insert = false;
2338 /* Check if the expression has been computed before. */
2339 cached_lhs = lookup_avail_expr (stmt, insert);
2341 /* If this is an assignment and the RHS was not in the hash table,
2342 then try to simplify the RHS and lookup the new RHS in the
2343 hash table. */
2344 if (! cached_lhs && TREE_CODE (stmt) == MODIFY_EXPR)
2345 cached_lhs = simplify_rhs_and_lookup_avail_expr (walk_data, stmt, insert);
2346 /* Similarly if this is a COND_EXPR and we did not find its
2347 expression in the hash table, simplify the condition and
2348 try again. */
2349 else if (! cached_lhs && TREE_CODE (stmt) == COND_EXPR)
2350 cached_lhs = simplify_cond_and_lookup_avail_expr (stmt, ann, insert);
2351 /* Similarly for a SWITCH_EXPR. */
2352 else if (!cached_lhs && TREE_CODE (stmt) == SWITCH_EXPR)
2353 cached_lhs = simplify_switch_and_lookup_avail_expr (stmt, insert);
2355 opt_stats.num_exprs_considered++;
2357 /* Get a pointer to the expression we are trying to optimize. */
2358 if (TREE_CODE (stmt) == COND_EXPR)
2359 expr_p = &COND_EXPR_COND (stmt);
2360 else if (TREE_CODE (stmt) == SWITCH_EXPR)
2361 expr_p = &SWITCH_COND (stmt);
2362 else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
2363 expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
2364 else
2365 expr_p = &TREE_OPERAND (stmt, 1);
2367 /* It is safe to ignore types here since we have already done
2368 type checking in the hashing and equality routines. In fact
2369 type checking here merely gets in the way of constant
2370 propagation. Also, make sure that it is safe to propagate
2371 CACHED_LHS into *EXPR_P. */
2372 if (cached_lhs
2373 && (TREE_CODE (cached_lhs) != SSA_NAME
2374 || may_propagate_copy (*expr_p, cached_lhs)))
2376 if (dump_file && (dump_flags & TDF_DETAILS))
2378 fprintf (dump_file, " Replaced redundant expr '");
2379 print_generic_expr (dump_file, *expr_p, dump_flags);
2380 fprintf (dump_file, "' with '");
2381 print_generic_expr (dump_file, cached_lhs, dump_flags);
2382 fprintf (dump_file, "'\n");
2385 opt_stats.num_re++;
2387 #if defined ENABLE_CHECKING
2388 gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
2389 || is_gimple_min_invariant (cached_lhs));
2390 #endif
2392 if (TREE_CODE (cached_lhs) == ADDR_EXPR
2393 || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
2394 && is_gimple_min_invariant (cached_lhs)))
2395 retval = true;
2397 propagate_tree_value (expr_p, cached_lhs);
2398 modify_stmt (stmt);
2400 return retval;
2403 /* STMT, a MODIFY_EXPR, may create certain equivalences, in either
2404 the available expressions table or the const_and_copies table.
2405 Detect and record those equivalences. */
2407 static void
2408 record_equivalences_from_stmt (tree stmt,
2409 varray_type *block_nonzero_vars_p,
2410 int may_optimize_p,
2411 stmt_ann_t ann)
2413 tree lhs = TREE_OPERAND (stmt, 0);
2414 enum tree_code lhs_code = TREE_CODE (lhs);
2415 int i;
2417 if (lhs_code == SSA_NAME)
2419 tree rhs = TREE_OPERAND (stmt, 1);
2421 /* Strip away any useless type conversions. */
2422 STRIP_USELESS_TYPE_CONVERSION (rhs);
2424 /* If the RHS of the assignment is a constant or another variable that
2425 may be propagated, register it in the CONST_AND_COPIES table. We
2426 do not need to record unwind data for this, since this is a true
2427 assignment and not an equivalence inferred from a comparison. All
2428 uses of this ssa name are dominated by this assignment, so unwinding
2429 just costs time and space. */
2430 if (may_optimize_p
2431 && (TREE_CODE (rhs) == SSA_NAME
2432 || is_gimple_min_invariant (rhs)))
2433 set_value_for (lhs, rhs, const_and_copies);
2435 /* alloca never returns zero and the address of a non-weak symbol
2436 is never zero. NOP_EXPRs and CONVERT_EXPRs can be completely
2437 stripped as they do not affect this equivalence. */
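/* For instance (hypothetical names): after p_1 = &some_global (with
   SOME_GLOBAL not weak) or p_1 = __builtin_alloca (n_2), P_1 is
   recorded as nonzero, so a dominated p_1 == 0 test can later be
   folded to false in lookup_avail_expr.  */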
2438 while (TREE_CODE (rhs) == NOP_EXPR
2439 || TREE_CODE (rhs) == CONVERT_EXPR)
2440 rhs = TREE_OPERAND (rhs, 0);
2442 if (alloca_call_p (rhs)
2443 || (TREE_CODE (rhs) == ADDR_EXPR
2444 && DECL_P (TREE_OPERAND (rhs, 0))
2445 && ! DECL_WEAK (TREE_OPERAND (rhs, 0))))
2446 record_var_is_nonzero (lhs, block_nonzero_vars_p);
2448 /* IOR of any value with a nonzero value will result in a nonzero
2449 value. Even if we do not know the exact result recording that
2450 the result is nonzero is worth the effort. */
2451 if (TREE_CODE (rhs) == BIT_IOR_EXPR
2452 && integer_nonzerop (TREE_OPERAND (rhs, 1)))
2453 record_var_is_nonzero (lhs, block_nonzero_vars_p);
2456 /* Look at both sides for pointer dereferences. If we find one, then
2457 the pointer must be nonnull and we can enter that equivalence into
2458 the hash tables. */
2459 if (flag_delete_null_pointer_checks)
2460 for (i = 0; i < 2; i++)
2462 tree t = TREE_OPERAND (stmt, i);
2464 /* Strip away any COMPONENT_REFs. */
2465 while (TREE_CODE (t) == COMPONENT_REF)
2466 t = TREE_OPERAND (t, 0);
2468 /* Now see if this is a pointer dereference. */
2469 if (TREE_CODE (t) == INDIRECT_REF)
2471 tree op = TREE_OPERAND (t, 0);
2473 /* If the pointer is a SSA variable, then enter new
2474 equivalences into the hash table. */
2475 while (TREE_CODE (op) == SSA_NAME)
2477 tree def = SSA_NAME_DEF_STMT (op);
2479 record_var_is_nonzero (op, block_nonzero_vars_p);
2481 /* And walk up the USE-DEF chains noting other SSA_NAMEs
2482 which are known to have a nonzero value. */
2483 if (def
2484 && TREE_CODE (def) == MODIFY_EXPR
2485 && TREE_CODE (TREE_OPERAND (def, 1)) == NOP_EXPR)
2486 op = TREE_OPERAND (TREE_OPERAND (def, 1), 0);
2487 else
2488 break;
2493 /* A memory store, even an aliased store, creates a useful
2494 equivalence. By exchanging the LHS and RHS, creating suitable
2495 vops and recording the result in the available expression table,
2496 we may be able to expose more redundant loads. */
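/* A sketch with hypothetical names: after the store *p_1 = x_2 we
   enter the artificial statement x_2 = *p_1 into the table, so a
   dominated load y_3 = *p_1 with the same virtual operands can be
   replaced by y_3 = x_2.  */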
2497 if (!ann->has_volatile_ops
2498 && (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
2499 || is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
2500 && !is_gimple_reg (lhs))
2502 tree rhs = TREE_OPERAND (stmt, 1);
2503 tree new;
2505 /* FIXME: If the LHS of the assignment is a bitfield and the RHS
2506 is a constant, we need to adjust the constant to fit into the
2507 type of the LHS. If the LHS is a bitfield and the RHS is not
2508 a constant, then we can not record any equivalences for this
2509 statement since we would need to represent the widening or
2510 narrowing of RHS. This fixes gcc.c-torture/execute/921016-1.c
2511 and should not be necessary if GCC represented bitfields
2512 properly. */
2513 if (lhs_code == COMPONENT_REF
2514 && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
2516 if (TREE_CONSTANT (rhs))
2517 rhs = widen_bitfield (rhs, TREE_OPERAND (lhs, 1), lhs);
2518 else
2519 rhs = NULL;
2521 /* If the value overflowed, then we can not use this equivalence. */
2522 if (rhs && ! is_gimple_min_invariant (rhs))
2523 rhs = NULL;
2526 if (rhs)
2528 /* Build a new statement with the RHS and LHS exchanged. */
2529 new = build (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);
2531 create_ssa_artficial_load_stmt (&(ann->operands), new);
2533 /* Finally enter the statement into the available expression
2534 table. */
2535 lookup_avail_expr (new, true);
2540 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
2541 CONST_AND_COPIES. */
2543 static bool
2544 cprop_operand (tree stmt, use_operand_p op_p, varray_type const_and_copies)
2546 bool may_have_exposed_new_symbols = false;
2547 tree val;
2548 tree op = USE_FROM_PTR (op_p);
2550 /* If the operand has a known constant value or it is known to be a
2551 copy of some other variable, use the value or copy stored in
2552 CONST_AND_COPIES. */
2553 val = VARRAY_TREE (const_and_copies, SSA_NAME_VERSION (op));
2554 if (val)
2556 tree op_type, val_type;
2558 /* Do not change the base variable in the virtual operand
2559 tables. That would make it impossible to reconstruct
2560 the renamed virtual operand if we later modify this
2561 statement. Also only allow the new value to be an SSA_NAME
2562 for propagation into virtual operands. */
2563 if (!is_gimple_reg (op)
2564 && (get_virtual_var (val) != get_virtual_var (op)
2565 || TREE_CODE (val) != SSA_NAME))
2566 return false;
2568 /* Get the toplevel type of each operand. */
2569 op_type = TREE_TYPE (op);
2570 val_type = TREE_TYPE (val);
2572 /* While both types are pointers, get the type of the object
2573 pointed to. */
2574 while (POINTER_TYPE_P (op_type) && POINTER_TYPE_P (val_type))
2576 op_type = TREE_TYPE (op_type);
2577 val_type = TREE_TYPE (val_type);
2580 /* Make sure underlying types match before propagating a constant by
2581 converting the constant to the proper type. Note that convert may
2582 return a non-gimple expression, in which case we ignore this
2583 propagation opportunity. */
2584 if (TREE_CODE (val) != SSA_NAME)
2586 if (!lang_hooks.types_compatible_p (op_type, val_type))
2588 val = fold_convert (TREE_TYPE (op), val);
2589 if (!is_gimple_min_invariant (val))
2590 return false;
2594 /* Certain operands are not allowed to be copy propagated due
2595 to their interaction with exception handling and some GCC
2596 extensions. */
2597 else if (!may_propagate_copy (op, val))
2598 return false;
2600 /* Dump details. */
2601 if (dump_file && (dump_flags & TDF_DETAILS))
2603 fprintf (dump_file, " Replaced '");
2604 print_generic_expr (dump_file, op, dump_flags);
2605 fprintf (dump_file, "' with %s '",
2606 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2607 print_generic_expr (dump_file, val, dump_flags);
2608 fprintf (dump_file, "'\n");
2611 /* If VAL is an ADDR_EXPR or a constant of pointer type, note
2612 that we may have exposed a new symbol for SSA renaming. */
2613 if (TREE_CODE (val) == ADDR_EXPR
2614 || (POINTER_TYPE_P (TREE_TYPE (op))
2615 && is_gimple_min_invariant (val)))
2616 may_have_exposed_new_symbols = true;
2618 propagate_value (op_p, val);
2620 /* And note that we modified this statement. This is now
2621 safe, even if we changed virtual operands since we will
2622 rescan the statement and rewrite its operands again. */
2623 modify_stmt (stmt);
2625 return may_have_exposed_new_symbols;
2628 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2629 known value for that SSA_NAME (or NULL if no value is known).
2631 Propagate values from CONST_AND_COPIES into the uses, vuses and
2632 v_may_def_ops of STMT. */
2634 static bool
2635 cprop_into_stmt (tree stmt, varray_type const_and_copies)
2637 bool may_have_exposed_new_symbols = false;
2638 use_operand_p op_p;
2639 ssa_op_iter iter;
2640 tree rhs;
2642 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2644 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2645 may_have_exposed_new_symbols
2646 |= cprop_operand (stmt, op_p, const_and_copies);
2649 if (may_have_exposed_new_symbols)
2651 rhs = get_rhs (stmt);
2652 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2653 recompute_tree_invarant_for_addr_expr (rhs);
2656 return may_have_exposed_new_symbols;
2660 /* Optimize the statement pointed by iterator SI.
2662 We try to perform some simplistic global redundancy elimination and
2663 constant propagation:
2665 1- To detect global redundancy, we keep track of expressions that have
2666 been computed in this block and its dominators. If we find that the
2667 same expression is computed more than once, we eliminate repeated
2668 computations by using the target of the first one.
2670 2- Constant values and copy assignments. This is used to do very
2671 simplistic constant and copy propagation. When a constant or copy
2672 assignment is found, we map the value on the RHS of the assignment to
2673 the variable in the LHS in the CONST_AND_COPIES table. */
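/* A short illustration (hypothetical SSA names): given

     a_1 = b_2 + c_3;
     ...
     d_4 = b_2 + c_3;

   the second computation is replaced by d_4 = a_1; and if b_2 is known
   from CONST_AND_COPIES to be the constant 7, its uses here are
   replaced by 7 before the redundancy check runs.  */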
2675 static void
2676 optimize_stmt (struct dom_walk_data *walk_data, basic_block bb,
2677 block_stmt_iterator si)
2679 stmt_ann_t ann;
2680 tree stmt;
2681 bool may_optimize_p;
2682 bool may_have_exposed_new_symbols = false;
2683 struct dom_walk_block_data *bd
2684 = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
2686 stmt = bsi_stmt (si);
2688 get_stmt_operands (stmt);
2689 ann = stmt_ann (stmt);
2690 opt_stats.num_stmts++;
2691 may_have_exposed_new_symbols = false;
2693 if (dump_file && (dump_flags & TDF_DETAILS))
2695 fprintf (dump_file, "Optimizing statement ");
2696 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2699 /* Const/copy propagate into USES, VUSES and the RHS of V_MAY_DEFs. */
2700 may_have_exposed_new_symbols = cprop_into_stmt (stmt, const_and_copies);
2702 /* If the statement has been modified with constant replacements,
2703 fold its RHS before checking for redundant computations. */
2704 if (ann->modified)
2706 /* Try to fold the statement making sure that STMT is kept
2707 up to date. */
2708 if (fold_stmt (bsi_stmt_ptr (si)))
2710 stmt = bsi_stmt (si);
2711 ann = stmt_ann (stmt);
2713 if (dump_file && (dump_flags & TDF_DETAILS))
2715 fprintf (dump_file, " Folded to: ");
2716 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2720 /* Constant/copy propagation above may change the set of
2721 virtual operands associated with this statement. Folding
2722 may remove the need for some virtual operands.
2724 Indicate we will need to rescan and rewrite the statement. */
2725 may_have_exposed_new_symbols = true;
2728 /* Check for redundant computations. Do this optimization only for
2729 assignments, returns of assignments, conditionals and switches without volatile ops. */
2730 may_optimize_p = (!ann->has_volatile_ops
2731 && ((TREE_CODE (stmt) == RETURN_EXPR
2732 && TREE_OPERAND (stmt, 0)
2733 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
2734 && ! (TREE_SIDE_EFFECTS
2735 (TREE_OPERAND (TREE_OPERAND (stmt, 0), 1))))
2736 || (TREE_CODE (stmt) == MODIFY_EXPR
2737 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
2738 || TREE_CODE (stmt) == COND_EXPR
2739 || TREE_CODE (stmt) == SWITCH_EXPR));
2741 if (may_optimize_p)
2742 may_have_exposed_new_symbols
2743 |= eliminate_redundant_computations (walk_data, stmt, ann);
2745 /* Record any additional equivalences created by this statement. */
2746 if (TREE_CODE (stmt) == MODIFY_EXPR)
2747 record_equivalences_from_stmt (stmt,
2748 &bd->nonzero_vars,
2749 may_optimize_p,
2750 ann);
2752 register_definitions_for_stmt (stmt);
2754 /* If STMT is a COND_EXPR and it was modified, then we may know
2755 where it goes. If that is the case, then mark the CFG as altered.
2757 This will cause us to later call remove_unreachable_blocks and
2758 cleanup_tree_cfg when it is safe to do so. It is not safe to
2759 clean things up here since removal of edges and such can trigger
2760 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2761 the manager.
2763 That's all fine and good, except that once SSA_NAMEs are released
2764 to the manager, we must not call create_ssa_name until all references
2765 to released SSA_NAMEs have been eliminated.
2767 All references to the deleted SSA_NAMEs can not be eliminated until
2768 we remove unreachable blocks.
2770 We can not remove unreachable blocks until after we have completed
2771 any queued jump threading.
2773 We can not complete any queued jump threads until we have taken
2774 appropriate variables out of SSA form. Taking variables out of
2775 SSA form can call create_ssa_name and thus we lose.
2777 Ultimately I suspect we're going to need to change the interface
2778 into the SSA_NAME manager. */
2780 if (ann->modified)
2782 tree val = NULL;
2784 if (TREE_CODE (stmt) == COND_EXPR)
2785 val = COND_EXPR_COND (stmt);
2786 else if (TREE_CODE (stmt) == SWITCH_EXPR)
2787 val = SWITCH_COND (stmt);
2789 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2790 cfg_altered = true;
2792 /* If we simplified a statement in such a way as to be shown that it
2793 cannot trap, update the eh information and the cfg to match. */
2794 if (maybe_clean_eh_stmt (stmt))
2796 bitmap_set_bit (need_eh_cleanup, bb->index);
2797 if (dump_file && (dump_flags & TDF_DETAILS))
2798 fprintf (dump_file, " Flagged to clear EH edges.\n");
2802 if (may_have_exposed_new_symbols)
2803 VARRAY_PUSH_TREE (stmts_to_rescan, bsi_stmt (si));
2806 /* Replace the RHS of STMT with NEW_RHS. If RHS can be found in the
2807 available expression hashtable, then return the LHS from the hash
2808 table.
2810 If INSERT is true, then we also update the available expression
2811 hash table to account for the changes made to STMT. */
2813 static tree
2814 update_rhs_and_lookup_avail_expr (tree stmt, tree new_rhs, bool insert)
2816 tree cached_lhs = NULL;
2818 /* Remove the old entry from the hash table. */
2819 if (insert)
2821 struct expr_hash_elt element;
2823 initialize_hash_element (stmt, NULL, &element);
2824 htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
2827 /* Now update the RHS of the assignment. */
2828 TREE_OPERAND (stmt, 1) = new_rhs;
2830 /* Now lookup the updated statement in the hash table. */
2831 cached_lhs = lookup_avail_expr (stmt, insert);
2833 /* We have now called lookup_avail_expr twice with two different
2834 versions of this same statement, once in optimize_stmt, once here.
2836 We know the call in optimize_stmt did not find an existing entry
2837 in the hash table, so a new entry was created. At the same time
2838 this statement was pushed onto the BLOCK_AVAIL_EXPRS varray.
2840 If this call failed to find an existing entry on the hash table,
2841 then the new version of this statement was entered into the
2842 hash table. And this statement was pushed onto BLOCK_AVAIL_EXPR
2843 for the second time. So there are two copies on BLOCK_AVAIL_EXPRs.
2845 If this call succeeded, we still have one copy of this statement
2846 on the BLOCK_AVAIL_EXPRs varray.
2848 For both cases, we need to pop the most recent entry off the
2849 BLOCK_AVAIL_EXPRs varray. For the case where we never found this
2850 statement in the hash tables, that will leave precisely one
2851 copy of this statement on BLOCK_AVAIL_EXPRs. For the case where
2852 we found a copy of this statement in the second hash table lookup
2853 we want _no_ copies of this statement in BLOCK_AVAIL_EXPRs. */
2854 if (insert)
2855 VARRAY_POP (avail_exprs_stack);
2857 /* And make sure we record the fact that we modified this
2858 statement. */
2859 modify_stmt (stmt);
2861 return cached_lhs;
2864 /* Search for an existing instance of STMT in the AVAIL_EXPRS table. If
2865 found, return its LHS. Otherwise insert STMT in the table and return
2866 NULL_TREE.
2868 Also, when an expression is first inserted in the AVAIL_EXPRS table, it
2869 is also pushed onto the AVAIL_EXPRS_STACK varray, so that it
2870 can be removed when we finish processing this block and its children.
2872 NOTE: This function assumes that STMT is a MODIFY_EXPR node that
2873 contains no CALL_EXPR on its RHS and makes no volatile nor
2874 aliased references. */
2876 static tree
2877 lookup_avail_expr (tree stmt, bool insert)
2879 void **slot;
2880 tree lhs;
2881 tree temp;
2882 struct expr_hash_elt *element = xcalloc (sizeof (struct expr_hash_elt), 1);
2884 lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL;
2886 initialize_hash_element (stmt, lhs, element);
2888 /* Don't bother remembering constant assignments and copy operations.
2889 Constants and copy operations are handled by the constant/copy propagator
2890 in optimize_stmt. */
2891 if (TREE_CODE (element->rhs) == SSA_NAME
2892 || is_gimple_min_invariant (element->rhs))
2894 free (element);
2895 return NULL_TREE;
2898 /* If this is an equality test against zero, see if we have recorded a
2899 nonzero value for the variable in question. */
2900 if ((TREE_CODE (element->rhs) == EQ_EXPR
2901 || TREE_CODE (element->rhs) == NE_EXPR)
2902 && TREE_CODE (TREE_OPERAND (element->rhs, 0)) == SSA_NAME
2903 && integer_zerop (TREE_OPERAND (element->rhs, 1)))
2905 int indx = SSA_NAME_VERSION (TREE_OPERAND (element->rhs, 0));
2907 if (bitmap_bit_p (nonzero_vars, indx))
2909 tree t = element->rhs;
2910 free (element);
2912 if (TREE_CODE (t) == EQ_EXPR)
2913 return boolean_false_node;
2914 else
2915 return boolean_true_node;
2919 /* Finally try to find the expression in the main expression hash table. */
2920 slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
2921 (insert ? INSERT : NO_INSERT));
2922 if (slot == NULL)
2924 free (element);
2925 return NULL_TREE;
2928 if (*slot == NULL)
2930 *slot = (void *) element;
2931 VARRAY_PUSH_TREE (avail_exprs_stack, stmt ? stmt : element->rhs);
2932 return NULL_TREE;
2935 /* Extract the LHS of the assignment so that it can be used as the current
2936 definition of another variable. */
2937 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2939 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2940 use the value from the const_and_copies table. */
2941 if (TREE_CODE (lhs) == SSA_NAME)
2943 temp = get_value_for (lhs, const_and_copies);
2944 if (temp)
2945 lhs = temp;
2948 free (element);
2949 return lhs;
2952 /* Given a condition COND, record into HI_P, LO_P and INVERTED_P the
2953 range of values that result in the conditional having a true value.
2955 Return true if we are successful in extracting a range from COND and
2956 false if we are unsuccessful. */
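/* Some illustrative results (assuming X has a signed integer type):
   X <= 10 yields [TYPE_MIN, 10] with INVERTED_P clear, X > 10 yields
   [11, TYPE_MAX], and X != 3 yields the single point [3, 3] with
   INVERTED_P set.  */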
2958 static bool
2959 extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p)
2961 tree op1 = TREE_OPERAND (cond, 1);
2962 tree high, low, type;
2963 int inverted;
2965 /* Experiments have shown that it's rarely, if ever, useful to
2966 record ranges for enumerations. Presumably this is due to
2967 the fact that they're rarely used directly. They are typically
2968 cast into an integer type and used that way. */
2969 if (TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE)
2970 return 0;
2972 type = TREE_TYPE (op1);
2974 switch (TREE_CODE (cond))
2976 case EQ_EXPR:
2977 high = low = op1;
2978 inverted = 0;
2979 break;
2981 case NE_EXPR:
2982 high = low = op1;
2983 inverted = 1;
2984 break;
2986 case GE_EXPR:
2987 low = op1;
2988 high = TYPE_MAX_VALUE (type);
2989 inverted = 0;
2990 break;
2992 case GT_EXPR:
2993 low = int_const_binop (PLUS_EXPR, op1, integer_one_node, 1);
2994 high = TYPE_MAX_VALUE (type);
2995 inverted = 0;
2996 break;
2998 case LE_EXPR:
2999 high = op1;
3000 low = TYPE_MIN_VALUE (type);
3001 inverted = 0;
3002 break;
3004 case LT_EXPR:
3005 high = int_const_binop (MINUS_EXPR, op1, integer_one_node, 1);
3006 low = TYPE_MIN_VALUE (type);
3007 inverted = 0;
3008 break;
3010 default:
3011 return 0;
3014 *hi_p = high;
3015 *lo_p = low;
3016 *inverted_p = inverted;
3017 return 1;
3020 /* Record a range created by COND for basic block BB. */
3022 static void
3023 record_range (tree cond, basic_block bb, varray_type *vrp_variables_p)
3025 /* We explicitly ignore NE_EXPRs. They rarely allow for meaningful
3026 range optimizations and significantly complicate the implementation. */
3027 if (COMPARISON_CLASS_P (cond)
3028 && TREE_CODE (cond) != NE_EXPR
3029 && TREE_CODE (TREE_TYPE (TREE_OPERAND (cond, 1))) == INTEGER_TYPE)
3031 struct vrp_element *element = ggc_alloc (sizeof (struct vrp_element));
3032 int ssa_version = SSA_NAME_VERSION (TREE_OPERAND (cond, 0));
3034 varray_type *vrp_records_p
3035 = (varray_type *)&VARRAY_GENERIC_PTR (vrp_data, ssa_version);
3037 element->low = NULL;
3038 element->high = NULL;
3039 element->cond = cond;
3040 element->bb = bb;
3042 if (*vrp_records_p == NULL)
3044 VARRAY_GENERIC_PTR_INIT (*vrp_records_p, 2, "vrp records");
3045 VARRAY_GENERIC_PTR (vrp_data, ssa_version) = *vrp_records_p;
3048 VARRAY_PUSH_GENERIC_PTR (*vrp_records_p, element);
3049 if (! *vrp_variables_p)
3050 VARRAY_TREE_INIT (*vrp_variables_p, 2, "vrp_variables");
3051 VARRAY_PUSH_TREE (*vrp_variables_p, TREE_OPERAND (cond, 0));
3055 /* Given a conditional statement IF_STMT, return the assignment 'X = Y'
3056 known to be true depending on which arm of IF_STMT is taken.
3058 Not all conditional statements will result in a useful assignment.
3059 Return NULL_TREE in that case.
3061 Also enter into the available expression table statements of
3062 the form:
3064 TRUE ARM FALSE ARM
3065 1 = cond 1 = cond'
3066 0 = cond' 0 = cond
3068 This allows us to lookup the condition in a dominated block and
3069 get back a constant indicating if the condition is true. */
3071 static struct eq_expr_value
3072 get_eq_expr_value (tree if_stmt,
3073 int true_arm,
3074 basic_block bb,
3075 varray_type *vrp_variables_p)
3077 tree cond;
3078 struct eq_expr_value retval;
3080 cond = COND_EXPR_COND (if_stmt);
3081 retval.src = NULL;
3082 retval.dst = NULL;
3084 /* If the conditional is a single variable 'X', return 'X = 1' for
3085 the true arm and 'X = 0' on the false arm. */
3086 if (TREE_CODE (cond) == SSA_NAME)
3088 retval.dst = cond;
3089 retval.src = constant_boolean_node (true_arm, TREE_TYPE (cond));
3090 return retval;
3093 /* If we have a comparison expression, then record its result into
3094 the available expression table. */
3095 if (COMPARISON_CLASS_P (cond))
3097 tree op0 = TREE_OPERAND (cond, 0);
3098 tree op1 = TREE_OPERAND (cond, 1);
3100 /* Special case comparing booleans against a constant as we know
3101 the value of OP0 on both arms of the branch. ie, we can record
3102 an equivalence for OP0 rather than COND. */
3103 if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
3104 && TREE_CODE (op0) == SSA_NAME
3105 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
3106 && is_gimple_min_invariant (op1))
3108 if ((TREE_CODE (cond) == EQ_EXPR && true_arm)
3109 || (TREE_CODE (cond) == NE_EXPR && ! true_arm))
3111 retval.src = op1;
3113 else
3115 if (integer_zerop (op1))
3116 retval.src = boolean_true_node;
3117 else
3118 retval.src = boolean_false_node;
3120 retval.dst = op0;
3121 return retval;
3124 if (TREE_CODE (op0) == SSA_NAME
3125 && (is_gimple_min_invariant (op1) || TREE_CODE (op1) == SSA_NAME))
3127 tree inverted = invert_truthvalue (cond);
3129 /* When we find an available expression in the hash table, we replace
3130 the expression with the LHS of the statement in the hash table.
3132 So, we want to build statements such as "1 = <condition>" on the
3133 true arm and "0 = <condition>" on the false arm. That way if we
3134 find the expression in the table, we will replace it with its
3135 known constant value. Also insert inversions of the result and
3136 condition into the hash table. */
3137 if (true_arm)
3139 record_cond (cond, boolean_true_node);
3140 record_dominating_conditions (cond);
3141 record_cond (inverted, boolean_false_node);
3143 if (TREE_CONSTANT (op1))
3144 record_range (cond, bb, vrp_variables_p);
3146 /* If the conditional is of the form 'X == Y', return 'X = Y'
3147 for the true arm. */
3148 if (TREE_CODE (cond) == EQ_EXPR)
3150 retval.dst = op0;
3151 retval.src = op1;
3152 return retval;
3155 else
3158 record_cond (inverted, boolean_true_node);
3159 record_dominating_conditions (inverted);
3160 record_cond (cond, boolean_false_node);
3162 if (TREE_CONSTANT (op1))
3163 record_range (inverted, bb, vrp_variables_p);
3165 /* If the conditional is of the form 'X != Y', return 'X = Y'
3166 for the false arm. */
3167 if (TREE_CODE (cond) == NE_EXPR)
3169 retval.dst = op0;
3170 retval.src = op1;
3171 return retval;
3177 return retval;
3180 /* Hashing and equality functions for AVAIL_EXPRS. The table stores
3181 MODIFY_EXPR statements. We compute a value number for expressions using
3182 the code of the expression and the SSA numbers of its operands. */
3184 static hashval_t
3185 avail_expr_hash (const void *p)
3187 stmt_ann_t ann = ((struct expr_hash_elt *)p)->ann;
3188 tree rhs = ((struct expr_hash_elt *)p)->rhs;
3189 hashval_t val = 0;
3190 size_t i;
3191 vuse_optype vuses;
3193 /* iterative_hash_expr knows how to deal with any expression and
3194 deals with commutative operators as well, so just use it instead
3195 of duplicating such complexities here. */
3196 val = iterative_hash_expr (rhs, val);
3198 /* If the hash table entry is not associated with a statement, then we
3199 can just hash the expression and not worry about virtual operands
3200 and such. */
3201 if (!ann)
3202 return val;
3204 /* Add the SSA version numbers of every vuse operand. This is important
3205 because compound variables like arrays are not renamed in the
3206 operands. Rather, the rename is done on the virtual variable
3207 representing all the elements of the array. */
3208 vuses = VUSE_OPS (ann);
3209 for (i = 0; i < NUM_VUSES (vuses); i++)
3210 val = iterative_hash_expr (VUSE_OP (vuses, i), val);
3212 return val;
3215 static hashval_t
3216 real_avail_expr_hash (const void *p)
3218 return ((const struct expr_hash_elt *)p)->hash;
3221 static int
3222 avail_expr_eq (const void *p1, const void *p2)
3224 stmt_ann_t ann1 = ((struct expr_hash_elt *)p1)->ann;
3225 tree rhs1 = ((struct expr_hash_elt *)p1)->rhs;
3226 stmt_ann_t ann2 = ((struct expr_hash_elt *)p2)->ann;
3227 tree rhs2 = ((struct expr_hash_elt *)p2)->rhs;
3229 /* If they are the same physical expression, return true. */
3230 if (rhs1 == rhs2 && ann1 == ann2)
3231 return true;
3233 /* If their codes are not equal, then quit now. */
3234 if (TREE_CODE (rhs1) != TREE_CODE (rhs2))
3235 return false;
3237 /* In case of a collision, both RHS have to be identical and have the
3238 same VUSE operands. */
3239 if ((TREE_TYPE (rhs1) == TREE_TYPE (rhs2)
3240 || lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2)))
3241 && operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
3243 vuse_optype ops1 = NULL;
3244 vuse_optype ops2 = NULL;
3245 size_t num_ops1 = 0;
3246 size_t num_ops2 = 0;
3247 size_t i;
3249 if (ann1)
3251 ops1 = VUSE_OPS (ann1);
3252 num_ops1 = NUM_VUSES (ops1);
3255 if (ann2)
3257 ops2 = VUSE_OPS (ann2);
3258 num_ops2 = NUM_VUSES (ops2);
3261 /* If the number of virtual uses is different, then we consider
3262 them not equal. */
3263 if (num_ops1 != num_ops2)
3264 return false;
3266 for (i = 0; i < num_ops1; i++)
3267 if (VUSE_OP (ops1, i) != VUSE_OP (ops2, i))
3268 return false;
3270 gcc_assert (((struct expr_hash_elt *)p1)->hash
3271 == ((struct expr_hash_elt *)p2)->hash);
3272 return true;
3275 return false;
3278 /* Given STMT, register all objects set by this statement
3279 into BLOCK_DEFS_STACK and CURRDEFS. */
3282 static void
3283 register_definitions_for_stmt (tree stmt)
3285 tree def;
3286 ssa_op_iter iter;
3288 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
3291 /* FIXME: We shouldn't be registering new defs if the variable
3292 doesn't need to be renamed. */
3293 register_new_def (def, &block_defs_stack);