1 /* SSA Dominator optimizations for trees
2 Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "ggc.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "errors.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "diagnostic.h"
37 #include "timevar.h"
38 #include "tree-dump.h"
39 #include "tree-flow.h"
40 #include "domwalk.h"
41 #include "real.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-propagate.h"
44 #include "langhooks.h"
46 /* This file implements optimizations on the dominator tree. */
48 /* Hash table with expressions made available during the renaming process.
49 When an assignment of the form X_i = EXPR is found, the statement is
50 stored in this table. If the same expression EXPR is later found on the
51 RHS of another statement, it is replaced with X_i (thus performing
52 global redundancy elimination). Similarly, as we pass through conditionals,
53 we record the conditional itself as having either a true or false value
54 in this table. */
55 static htab_t avail_exprs;
57 /* Stack of available expressions in AVAIL_EXPRS. Each block pushes any
58 expressions it enters into the hash table along with a marker entry
59 (null). When we finish processing the block, we pop off entries and
60 remove the expressions from the global hash table until we hit the
61 marker. */
62 static varray_type avail_exprs_stack;
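/* Illustrative sketch, not part of this pass: a toy model (hypothetical
   names, simplified types) of the marker-stack discipline described
   above.  Expressions entered while processing a block are pushed onto
   a stack, a sentinel marks the block boundary, and unwinding to the
   sentinel removes exactly the expressions that block made available.
   Guarded out so it is never compiled.  */
#if 0
#include <assert.h>
#include <stddef.h>

struct toy_expr { char op; int opnd0, opnd1; };      /* e.g. a_1 + b_2 */
struct toy_entry { struct toy_expr expr; int lhs_version; int live; };

static struct toy_entry table[64];                   /* toy "hash table" */
static struct toy_entry *stack[64];                  /* unwind stack */
static int stack_top;

/* Return the existing entry for E (a redundancy hit), or record E as
   computed into SSA version LHS_VERSION and return NULL.  */
static struct toy_entry *
toy_lookup_or_insert (struct toy_expr e, int lhs_version)
{
  for (int i = 0; i < 64; i++)
    if (table[i].live
        && table[i].expr.op == e.op
        && table[i].expr.opnd0 == e.opnd0
        && table[i].expr.opnd1 == e.opnd1)
      return &table[i];
  for (int i = 0; i < 64; i++)
    if (!table[i].live)
      {
        table[i] = (struct toy_entry) { e, lhs_version, 1 };
        stack[stack_top++] = &table[i];              /* remember for unwind */
        break;
      }
  return NULL;
}

static void
toy_enter_block (void)
{
  stack[stack_top++] = NULL;                         /* push the marker */
}

static void
toy_leave_block (void)
{
  while (stack_top > 0 && stack[--stack_top] != NULL)
    stack[stack_top]->live = 0;                      /* pop until the marker */
}

int
main (void)
{
  struct toy_expr e = { '+', 1, 2 };                 /* a_1 + b_2 */
  toy_enter_block ();
  assert (toy_lookup_or_insert (e, 3) == NULL);      /* x_3 = a_1 + b_2 */
  assert (toy_lookup_or_insert (e, 4) != NULL);      /* redundant; reuse x_3 */
  toy_leave_block ();
  assert (toy_lookup_or_insert (e, 5) == NULL);      /* no longer available */
  return 0;
}
#endif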
64 /* Stack of trees used to restore the global currdefs to its original
65 state after completing optimization of a block and its dominator children.
67 An SSA_NAME indicates that the current definition of the underlying
68 variable should be set to the given SSA_NAME.
70 A _DECL node indicates that the underlying variable has no current
71 definition.
73 A NULL node is used to mark the last node associated with the
74 current block. */
75 varray_type block_defs_stack;
77 /* Stack of statements we need to rescan during finalization for newly
78 exposed variables.
80 Statement rescanning must occur after the current block's available
81 expressions are removed from AVAIL_EXPRS. Else we may change the
82 hash code for an expression and be unable to find/remove it from
83 AVAIL_EXPRS. */
84 varray_type stmts_to_rescan;
86 /* Structure for entries in the expression hash table.
88 This requires more memory for the hash table entries, but allows us
89 to avoid creating silly tree nodes and annotations for conditionals,
90 and eliminates two global hash tables and two block-local varrays.
92 It also allows us to reduce the number of hash table lookups we
93 have to perform in lookup_avail_expr and finally it allows us to
94 significantly reduce the number of calls into the hashing routine
95 itself. */
97 struct expr_hash_elt
99 /* The value (lhs) of this expression. */
100 tree lhs;
102 /* The expression (rhs) we want to record. */
103 tree rhs;
105 /* The annotation if this element corresponds to a statement. */
106 stmt_ann_t ann;
108 /* The hash value for RHS/ann. */
109 hashval_t hash;
112 /* Table of constant values and copies indexed by SSA name. When the
113 renaming pass finds an assignment of a constant (X_i = C) or a copy
114 assignment from another SSA variable (X_i = Y_j), it creates a mapping
115 between X_i and the RHS in this table. This mapping is used later on,
116 when renaming uses of X_i. If an assignment to X_i is found in this
117 table, instead of using X_i, we use the RHS of the statement stored in
118 this table (thus performing very simplistic copy and constant
119 propagation). */
120 static varray_type const_and_copies;
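/* Illustrative sketch, not part of this pass: a toy model (hypothetical
   names, integers standing in for trees) of the CONST_AND_COPIES table.
   It is indexed by SSA version; seeing x_5 = 4 records 4 under version
   5, seeing y_7 = x_5 copies x_5's known value to version 7, and later
   uses of either name can then be replaced by the recorded value.
   Guarded out so it is never compiled.  */
#if 0
#include <assert.h>

#define TOY_UNKNOWN (-1)

static int value_of[16];	/* indexed by SSA version */

static void
toy_record_const (int version, int cst)
{
  value_of[version] = cst;
}

static void
toy_record_copy (int dst_version, int src_version)
{
  /* For a copy, propagate whatever we know about the source.  */
  value_of[dst_version] = value_of[src_version];
}

int
main (void)
{
  for (int i = 0; i < 16; i++)
    value_of[i] = TOY_UNKNOWN;

  toy_record_const (5, 4);		/* x_5 = 4 */
  toy_record_copy (7, 5);		/* y_7 = x_5 */
  assert (value_of[7] == 4);		/* uses of y_7 fold to 4 */
  assert (value_of[9] == TOY_UNKNOWN);	/* nothing known about z_9 */
  return 0;
}
#endif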
122 /* Stack of dest,src pairs that need to be restored during finalization.
124 A NULL entry is used to mark the end of pairs which need to be
125 restored during finalization of this block. */
126 static varray_type const_and_copies_stack;
128 /* Bitmap of SSA_NAMEs known to have a nonzero value, even if we do not
129 know their exact value. */
130 static bitmap nonzero_vars;
132 /* Track whether or not we have changed the control flow graph. */
133 static bool cfg_altered;
135 /* Bitmap of blocks that have had EH statements cleaned. We should
136 remove their dead edges eventually. */
137 static bitmap need_eh_cleanup;
139 /* Statistics for dominator optimizations. */
140 struct opt_stats_d
142 long num_stmts;
143 long num_exprs_considered;
144 long num_re;
147 /* Value range propagation record. Each time we encounter a conditional
148 of the form SSA_NAME COND CONST we create a new vrp_element to record
149 how the condition affects the possible values SSA_NAME may have.
151 Each record contains the condition tested (COND), and the range of
152 values the variable may legitimately have if COND is true. Note the
153 range of values may be a smaller range than COND specifies if we have
154 recorded other ranges for this variable. Each record also contains the
155 block in which the range was recorded for invalidation purposes.
157 Note that the current known range is computed lazily. This allows us
158 to avoid the overhead of computing ranges which are never queried.
160 When we encounter a conditional, we look for records which constrain
161 the SSA_NAME used in the condition. In some cases those records allow
162 us to determine the condition's result at compile time. In other cases
163 they may allow us to simplify the condition.
165 We also use value ranges to do things like transform signed div/mod
166 operations into unsigned div/mod or to simplify ABS_EXPRs.
168 Simple experiments have shown these optimizations to not be all that
169 useful on switch statements (much to my surprise). So switch statement
170 optimizations are not performed.
172 Note carefully that we do not propagate information through each statement
173 in the block. I.e., if we know variable X has a value in the range
174 [0, 25] and we encounter Y = X + 1, we do not track a value range
175 for Y (which would be [1, 26] if we cared). Similarly we do not
176 constrain values as we encounter narrowing typecasts, etc. */
178 struct vrp_element
180 /* The highest and lowest values the variable in COND may contain when
181 COND is true. Note this may not necessarily be the same values
182 tested by COND if the same variable was used in earlier conditionals.
184 Note this is computed lazily and thus can be NULL indicating that
185 the values have not been computed yet. */
186 tree low;
187 tree high;
189 /* The actual conditional we recorded. This is needed since we compute
190 ranges lazily. */
191 tree cond;
193 /* The basic block where this record was created. We use this to determine
194 when to remove records. */
195 basic_block bb;
198 static struct opt_stats_d opt_stats;
200 /* A virtual array holding value range records for the variable identified
201 by the index, SSA_VERSION. */
202 static varray_type vrp_data;
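/* Illustrative sketch, not part of this pass: a toy model (hypothetical
   names, plain int ranges) of how a recorded conditional constrains a
   variable and can decide a later conditional, as described for
   vrp_element above.  Guarded out so it is never compiled.  */
#if 0
#include <assert.h>
#include <limits.h>

struct toy_range { int low, high; };

/* The range implied for x by "x < BOUND" being true.  */
static struct toy_range
toy_range_from_lt (int bound)
{
  struct toy_range r = { INT_MIN, bound - 1 };
  return r;
}

/* Decide "x < BOUND" from range R: 1 if it must be true, 0 if it must
   be false, -1 if R cannot decide it.  */
static int
toy_decide_lt (struct toy_range r, int bound)
{
  if (r.high < bound)
    return 1;
  if (r.low >= bound)
    return 0;
  return -1;
}

int
main (void)
{
  /* Recorded when the true edge of "if (x_1 < 10)" was taken.  */
  struct toy_range r = toy_range_from_lt (10);

  assert (toy_decide_lt (r, 20) == 1);	/* "x_1 < 20" folds to true */
  assert (toy_decide_lt (r, 5) == -1);	/* cannot be decided */
  return 0;
}
#endif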
204 /* Datastructure for block local data used during the dominator walk.
205 We maintain a stack of these as we recursively walk down the
206 dominator tree. */
208 struct dom_walk_block_data
210 /* Array of SSA_NAMEs which were recorded as having a nonzero value in
211 this block; their nonzero state is restored during finalization. */
212 varray_type nonzero_vars;
214 /* Array of variables which have their values constrained by operations
215 in this basic block. We use this during finalization to know
216 which variables need their VRP data updated. */
217 varray_type vrp_variables;
220 struct eq_expr_value
222 tree src;
223 tree dst;
226 /* Local functions. */
227 static void optimize_stmt (struct dom_walk_data *,
228 basic_block bb,
229 block_stmt_iterator);
230 static inline tree get_value_for (tree, varray_type table);
231 static inline void set_value_for (tree, tree, varray_type table);
232 static tree lookup_avail_expr (tree, bool);
233 static struct eq_expr_value get_eq_expr_value (tree, int,
234 basic_block, varray_type *);
235 static hashval_t avail_expr_hash (const void *);
236 static hashval_t real_avail_expr_hash (const void *);
237 static int avail_expr_eq (const void *, const void *);
238 static void htab_statistics (FILE *, htab_t);
239 static void record_cond (tree, tree);
240 static void record_dominating_conditions (tree);
241 static void record_const_or_copy (tree, tree);
242 static void record_equality (tree, tree);
243 static tree update_rhs_and_lookup_avail_expr (tree, tree, bool);
244 static tree simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *,
245 tree, int);
246 static tree simplify_cond_and_lookup_avail_expr (tree, stmt_ann_t, int);
247 static tree simplify_switch_and_lookup_avail_expr (tree, int);
248 static tree find_equivalent_equality_comparison (tree);
249 static void record_range (tree, basic_block, varray_type *);
250 static bool extract_range_from_cond (tree, tree *, tree *, int *);
251 static void record_equivalences_from_phis (struct dom_walk_data *, basic_block);
252 static void record_equivalences_from_incoming_edge (struct dom_walk_data *,
253 basic_block);
254 static bool eliminate_redundant_computations (struct dom_walk_data *,
255 tree, stmt_ann_t);
256 static void record_equivalences_from_stmt (tree, varray_type *,
257 int, stmt_ann_t);
258 static void thread_across_edge (struct dom_walk_data *, edge);
259 static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
260 static void dom_opt_initialize_block_local_data (struct dom_walk_data *,
261 basic_block, bool);
262 static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
263 static void cprop_into_phis (struct dom_walk_data *, basic_block);
264 static void remove_local_expressions_from_table (void);
265 static void restore_vars_to_original_value (void);
266 static void restore_currdefs_to_original_value (void);
267 static void register_definitions_for_stmt (tree);
268 static edge single_incoming_edge_ignoring_loop_edges (basic_block);
270 /* Local version of fold that doesn't introduce cruft. */
272 static tree
273 local_fold (tree t)
275 t = fold (t);
277 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR that
278 may have been added by fold, and "useless" type conversions that might
279 now be apparent due to propagation. */
280 STRIP_USELESS_TYPE_CONVERSION (t);
282 return t;
285 /* Return the value associated with variable VAR in TABLE. */
287 static inline tree
288 get_value_for (tree var, varray_type table)
290 return VARRAY_TREE (table, SSA_NAME_VERSION (var));
293 /* Associate VALUE to variable VAR in TABLE. */
295 static inline void
296 set_value_for (tree var, tree value, varray_type table)
298 VARRAY_TREE (table, SSA_NAME_VERSION (var)) = value;
301 /* Jump threading, redundancy elimination and const/copy propagation.
303 This pass may expose new symbols that need to be renamed into SSA. For
304 every new symbol exposed, its corresponding bit will be set in
305 VARS_TO_RENAME. */
307 static void
308 tree_ssa_dominator_optimize (void)
310 struct dom_walk_data walk_data;
311 unsigned int i;
313 for (i = 0; i < num_referenced_vars; i++)
314 var_ann (referenced_var (i))->current_def = NULL;
316 /* Mark loop edges so we avoid threading across loop boundaries.
317 This may result in transforming a natural loop into an irreducible
318 region. */
319 mark_dfs_back_edges ();
321 /* Create our hash tables. */
322 avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
323 VARRAY_TREE_INIT (avail_exprs_stack, 20, "Available expression stack");
324 VARRAY_TREE_INIT (block_defs_stack, 20, "Block DEFS stack");
325 VARRAY_TREE_INIT (const_and_copies, num_ssa_names, "const_and_copies");
326 VARRAY_TREE_INIT (const_and_copies_stack, 20, "Block const_and_copies stack");
327 nonzero_vars = BITMAP_XMALLOC ();
328 VARRAY_GENERIC_PTR_INIT (vrp_data, num_ssa_names, "vrp_data");
329 need_eh_cleanup = BITMAP_XMALLOC ();
330 VARRAY_TREE_INIT (stmts_to_rescan, 20, "Statements to rescan");
332 /* Setup callbacks for the generic dominator tree walker. */
333 walk_data.walk_stmts_backward = false;
334 walk_data.dom_direction = CDI_DOMINATORS;
335 walk_data.initialize_block_local_data = dom_opt_initialize_block_local_data;
336 walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
337 walk_data.before_dom_children_walk_stmts = optimize_stmt;
338 walk_data.before_dom_children_after_stmts = cprop_into_phis;
339 walk_data.after_dom_children_before_stmts = NULL;
340 walk_data.after_dom_children_walk_stmts = NULL;
341 walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
342 /* Right now we only attach a dummy COND_EXPR to the global data pointer.
343 When we attach more stuff we'll need to fill this out with a real
344 structure. */
345 walk_data.global_data = NULL;
346 walk_data.block_local_data_size = sizeof (struct dom_walk_block_data);
348 /* Now initialize the dominator walker. */
349 init_walk_dominator_tree (&walk_data);
351 calculate_dominance_info (CDI_DOMINATORS);
353 /* If we prove certain blocks are unreachable, then we want to
354 repeat the dominator optimization process as PHI nodes may
355 have turned into copies which allows better propagation of
356 values. So we repeat until we do not identify any new unreachable
357 blocks. */
360 /* Optimize the dominator tree. */
361 cfg_altered = false;
363 /* Recursively walk the dominator tree optimizing statements. */
364 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
366 /* If we exposed any new variables, go ahead and put them into
367 SSA form now, before we handle jump threading. This simplifies
368 interactions between rewriting of _DECL nodes into SSA form
369 and rewriting SSA_NAME nodes into SSA form after block
370 duplication and CFG manipulation. */
371 if (bitmap_first_set_bit (vars_to_rename) >= 0)
373 rewrite_into_ssa (false);
374 bitmap_clear (vars_to_rename);
377 /* Thread jumps, creating duplicate blocks as needed. */
378 cfg_altered = thread_through_all_blocks ();
380 /* Removal of statements may make some EH edges dead. Purge
381 such edges from the CFG as needed. */
382 if (bitmap_first_set_bit (need_eh_cleanup) >= 0)
384 cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
385 bitmap_zero (need_eh_cleanup);
388 free_dominance_info (CDI_DOMINATORS);
389 cfg_altered = cleanup_tree_cfg ();
390 calculate_dominance_info (CDI_DOMINATORS);
392 rewrite_ssa_into_ssa ();
394 if (VARRAY_ACTIVE_SIZE (const_and_copies) <= num_ssa_names)
396 VARRAY_GROW (const_and_copies, num_ssa_names);
397 VARRAY_GROW (vrp_data, num_ssa_names);
400 /* Reinitialize the various tables. */
401 bitmap_clear (nonzero_vars);
402 htab_empty (avail_exprs);
403 VARRAY_CLEAR (const_and_copies);
404 VARRAY_CLEAR (vrp_data);
406 for (i = 0; i < num_referenced_vars; i++)
407 var_ann (referenced_var (i))->current_def = NULL;
409 while (cfg_altered);
411 /* Debugging dumps. */
412 if (dump_file && (dump_flags & TDF_STATS))
413 dump_dominator_optimization_stats (dump_file);
415 /* We emptied the hash table earlier, now delete it completely. */
416 htab_delete (avail_exprs);
418 /* It is not necessary to clear CURRDEFS, REDIRECTION_EDGES, VRP_DATA,
419 CONST_AND_COPIES, and NONZERO_VARS as they all get cleared at the bottom
420 of the do-while loop above. */
422 /* And finalize the dominator walker. */
423 fini_walk_dominator_tree (&walk_data);
425 /* Free nonzero_vars. */
426 BITMAP_XFREE (nonzero_vars);
427 BITMAP_XFREE (need_eh_cleanup);
430 static bool
431 gate_dominator (void)
433 return flag_tree_dom != 0;
436 struct tree_opt_pass pass_dominator =
438 "dom", /* name */
439 gate_dominator, /* gate */
440 tree_ssa_dominator_optimize, /* execute */
441 NULL, /* sub */
442 NULL, /* next */
443 0, /* static_pass_number */
444 TV_TREE_SSA_DOMINATOR_OPTS, /* tv_id */
445 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
446 0, /* properties_provided */
447 0, /* properties_destroyed */
448 0, /* todo_flags_start */
449 TODO_dump_func | TODO_rename_vars
450 | TODO_verify_ssa, /* todo_flags_finish */
451 0 /* letter */
455 /* We are exiting BB, see if the target block begins with a conditional
456 jump which has a known value when reached via BB. */
458 static void
459 thread_across_edge (struct dom_walk_data *walk_data, edge e)
461 block_stmt_iterator bsi;
462 tree stmt = NULL;
463 tree phi;
465 /* Each PHI creates a temporary equivalence, record them. */
466 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
468 tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
469 tree dst = PHI_RESULT (phi);
470 record_const_or_copy (dst, src);
471 register_new_def (dst, &block_defs_stack);
474 for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
476 tree lhs, cached_lhs;
478 stmt = bsi_stmt (bsi);
480 /* Ignore empty statements and labels. */
481 if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
482 continue;
484 /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new
485 value, then stop our search here. Ideally when we stop a
486 search we stop on a COND_EXPR or SWITCH_EXPR. */
487 if (TREE_CODE (stmt) != MODIFY_EXPR
488 || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
489 break;
491 /* At this point we have a statement which assigns an RHS to an
492 SSA_VAR on the LHS. We want to prove that the RHS is already
493 available and that its value is held in the current definition
494 of the LHS -- meaning that this assignment is a NOP when
495 reached via edge E. */
496 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
497 cached_lhs = TREE_OPERAND (stmt, 1);
498 else
499 cached_lhs = lookup_avail_expr (stmt, false);
501 lhs = TREE_OPERAND (stmt, 0);
503 /* This can happen if we thread around to the start of a loop. */
504 if (lhs == cached_lhs)
505 break;
507 /* If we did not find RHS in the hash table, then try again after
508 temporarily const/copy propagating the operands. */
509 if (!cached_lhs)
511 /* Copy the operands. */
512 stmt_ann_t ann = stmt_ann (stmt);
513 use_optype uses = USE_OPS (ann);
514 vuse_optype vuses = VUSE_OPS (ann);
515 tree *uses_copy = xcalloc (NUM_USES (uses), sizeof (tree));
516 tree *vuses_copy = xcalloc (NUM_VUSES (vuses), sizeof (tree));
517 unsigned int i;
519 /* Make a copy of the uses into USES_COPY, then cprop into
520 the use operands. */
521 for (i = 0; i < NUM_USES (uses); i++)
523 tree tmp = NULL;
525 uses_copy[i] = USE_OP (uses, i);
526 if (TREE_CODE (USE_OP (uses, i)) == SSA_NAME)
527 tmp = get_value_for (USE_OP (uses, i), const_and_copies);
528 if (tmp)
529 SET_USE_OP (uses, i, tmp);
532 /* Similarly for virtual uses. */
533 for (i = 0; i < NUM_VUSES (vuses); i++)
535 tree tmp = NULL;
537 vuses_copy[i] = VUSE_OP (vuses, i);
538 if (TREE_CODE (VUSE_OP (vuses, i)) == SSA_NAME)
539 tmp = get_value_for (VUSE_OP (vuses, i), const_and_copies);
540 if (tmp)
541 SET_VUSE_OP (vuses, i, tmp);
544 /* Try to lookup the new expression. */
545 cached_lhs = lookup_avail_expr (stmt, false);
547 /* Restore the statement's original uses/defs. */
548 for (i = 0; i < NUM_USES (uses); i++)
549 SET_USE_OP (uses, i, uses_copy[i]);
551 for (i = 0; i < NUM_VUSES (vuses); i++)
552 SET_VUSE_OP (vuses, i, vuses_copy[i]);
554 free (uses_copy);
555 free (vuses_copy);
557 /* If we still did not find the expression in the hash table,
558 then we can not ignore this statement. */
559 if (! cached_lhs)
560 break;
563 /* If the expression in the hash table was not assigned to an
564 SSA_NAME, then we can not ignore this statement. */
565 if (TREE_CODE (cached_lhs) != SSA_NAME)
566 break;
568 /* If we have different underlying variables, then we can not
569 ignore this statement. */
570 if (SSA_NAME_VAR (cached_lhs) != SSA_NAME_VAR (lhs))
571 break;
573 /* If CACHED_LHS does not represent the current value of the underlying
574 variable in CACHED_LHS/LHS, then we can not ignore this statement. */
575 if (var_ann (SSA_NAME_VAR (lhs))->current_def != cached_lhs)
576 break;
578 /* If we got here, then we can ignore this statement and continue
579 walking through the statements in the block looking for a threadable
580 COND_EXPR.
582 We want to record an equivalence lhs = cached_lhs so that if
583 the result of this statement is used later we can copy propagate
584 suitably. */
585 record_const_or_copy (lhs, cached_lhs);
586 register_new_def (lhs, &block_defs_stack);
589 /* If we stopped at a COND_EXPR or SWITCH_EXPR, then see if we know which
590 arm will be taken. */
591 if (stmt
592 && (TREE_CODE (stmt) == COND_EXPR
593 || TREE_CODE (stmt) == SWITCH_EXPR))
595 tree cond, cached_lhs;
596 edge e1;
598 /* Do not forward entry edges into the loop. If the loop has
599 multiple entry edges we may end up constructing an irreducible
600 region.
601 ??? We may consider forwarding the edges in the case that all
602 incoming edges forward to the same destination block. */
603 if (! (e->flags & EDGE_DFS_BACK))
605 for (e1 = e->dest->pred; e1; e1 = e1->pred_next)
606 if (e1->flags & EDGE_DFS_BACK)
607 break;
608 if (e1)
609 return;
612 /* Now temporarily cprop the operands and try to find the resulting
613 expression in the hash tables. */
614 if (TREE_CODE (stmt) == COND_EXPR)
615 cond = COND_EXPR_COND (stmt);
616 else
617 cond = SWITCH_COND (stmt);
619 if (COMPARISON_CLASS_P (cond))
621 tree dummy_cond, op0, op1;
622 enum tree_code cond_code;
624 op0 = TREE_OPERAND (cond, 0);
625 op1 = TREE_OPERAND (cond, 1);
626 cond_code = TREE_CODE (cond);
628 /* Get the current value of both operands. */
629 if (TREE_CODE (op0) == SSA_NAME)
631 tree tmp = get_value_for (op0, const_and_copies);
632 if (tmp)
633 op0 = tmp;
636 if (TREE_CODE (op1) == SSA_NAME)
638 tree tmp = get_value_for (op1, const_and_copies);
639 if (tmp)
640 op1 = tmp;
643 /* Stuff the operator and operands into our dummy conditional
644 expression, creating the dummy conditional if necessary. */
645 dummy_cond = walk_data->global_data;
646 if (! dummy_cond)
648 dummy_cond = build (cond_code, boolean_type_node, op0, op1);
649 dummy_cond = build (COND_EXPR, void_type_node,
650 dummy_cond, NULL, NULL);
651 walk_data->global_data = dummy_cond;
653 else
655 TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), cond_code);
656 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op0;
657 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1) = op1;
660 /* If the conditional folds to an invariant, then we are done,
661 otherwise look it up in the hash tables. */
662 cached_lhs = local_fold (COND_EXPR_COND (dummy_cond));
663 if (! is_gimple_min_invariant (cached_lhs))
664 cached_lhs = lookup_avail_expr (dummy_cond, false);
665 if (!cached_lhs || ! is_gimple_min_invariant (cached_lhs))
667 cached_lhs = simplify_cond_and_lookup_avail_expr (dummy_cond,
668 NULL,
669 false);
672 /* We can have conditionals which just test the state of a
673 variable rather than use a relational operator. These are
674 simpler to handle. */
675 else if (TREE_CODE (cond) == SSA_NAME)
677 cached_lhs = cond;
678 cached_lhs = get_value_for (cached_lhs, const_and_copies);
679 if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
680 cached_lhs = 0;
682 else
683 cached_lhs = lookup_avail_expr (stmt, false);
685 if (cached_lhs)
687 edge taken_edge = find_taken_edge (e->dest, cached_lhs);
688 basic_block dest = (taken_edge ? taken_edge->dest : NULL);
690 if (dest == e->dest)
691 return;
693 /* If we have a known destination for the conditional, then
694 we can perform this optimization, which saves at least one
695 conditional jump each time it applies since we get to
696 bypass the conditional at our original destination. */
697 if (dest)
699 e->aux = taken_edge;
700 bb_ann (e->dest)->incoming_edge_threaded = true;
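/* Illustrative sketch, not part of this pass: a hypothetical source-level
   example of the effect thread_across_edge is after.  When the condition
   at the start of the successor block has a known value on a particular
   incoming edge, that edge can be redirected past the conditional.  The
   two functions compute identical results; the second reflects the
   control flow after threading.  Guarded out so it is never compiled.  */
#if 0
#include <assert.h>

static int
before_threading (int a)
{
  int x;
  if (a > 5)
    x = 1;
  else
    x = 0;
  /* On the edge from the THEN arm x is known to be 1, and on the edge
     from the ELSE arm x is known to be 0, so this test is redundant.  */
  if (x)
    return 10;
  return 20;
}

static int
after_threading (int a)
{
  /* Each predecessor now jumps straight to the arm it would select.  */
  if (a > 5)
    return 10;
  return 20;
}

int
main (void)
{
  for (int a = 0; a <= 12; a++)
    assert (before_threading (a) == after_threading (a));
  return 0;
}
#endif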
707 /* Initialize the local stacks.
709 AVAIL_EXPRS stores all the expressions made available in this block.
711 CONST_AND_COPIES stores var/value pairs to restore at the end of this
712 block.
714 NONZERO_VARS stores the vars which have a nonzero value made in this
715 block.
717 STMTS_TO_RESCAN is a list of statements we will rescan for operands.
719 VRP_VARIABLES is the list of variables which have had their values
720 constrained by an operation in this block.
722 These stacks are cleared in the finalization routine run for each
723 block. */
725 static void
726 dom_opt_initialize_block_local_data (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
727 basic_block bb ATTRIBUTE_UNUSED,
728 bool recycled ATTRIBUTE_UNUSED)
730 struct dom_walk_block_data *bd
731 = (struct dom_walk_block_data *)VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
733 /* We get cleared memory from the allocator, so if the memory is not
734 cleared, then we are re-using a previously allocated entry. In
735 that case, we can also re-use the underlying virtual arrays. Just
736 make sure we clear them before using them! */
737 if (recycled)
739 gcc_assert (!bd->nonzero_vars
740 || VARRAY_ACTIVE_SIZE (bd->nonzero_vars) == 0);
741 gcc_assert (!bd->vrp_variables
742 || VARRAY_ACTIVE_SIZE (bd->vrp_variables) == 0);
746 /* Initialize local stacks for this optimizer and record equivalences
747 upon entry to BB. Equivalences can come from the edge traversed to
748 reach BB or they may come from PHI nodes at the start of BB. */
750 static void
751 dom_opt_initialize_block (struct dom_walk_data *walk_data, basic_block bb)
753 if (dump_file && (dump_flags & TDF_DETAILS))
754 fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);
756 /* Push a marker on the stacks of local information so that we know how
757 far to unwind when we finalize this block. */
758 VARRAY_PUSH_TREE (avail_exprs_stack, NULL_TREE);
759 VARRAY_PUSH_TREE (block_defs_stack, NULL_TREE);
760 VARRAY_PUSH_TREE (const_and_copies_stack, NULL_TREE);
762 record_equivalences_from_incoming_edge (walk_data, bb);
764 /* PHI nodes can create equivalences too. */
765 record_equivalences_from_phis (walk_data, bb);
768 /* Given an expression EXPR (a relational expression or a statement),
769 initialize the hash table element pointed to by ELEMENT. */
771 static void
772 initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
774 /* Hash table elements may be based on conditional expressions or statements.
776 For the former case, we have no annotation and we want to hash the
777 conditional expression. In the latter case we have an annotation and
778 we want to record the expression the statement evaluates. */
779 if (COMPARISON_CLASS_P (expr) || TREE_CODE (expr) == TRUTH_NOT_EXPR)
781 element->ann = NULL;
782 element->rhs = expr;
784 else if (TREE_CODE (expr) == COND_EXPR)
786 element->ann = stmt_ann (expr);
787 element->rhs = COND_EXPR_COND (expr);
789 else if (TREE_CODE (expr) == SWITCH_EXPR)
791 element->ann = stmt_ann (expr);
792 element->rhs = SWITCH_COND (expr);
794 else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
796 element->ann = stmt_ann (expr);
797 element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1);
799 else
801 element->ann = stmt_ann (expr);
802 element->rhs = TREE_OPERAND (expr, 1);
805 element->lhs = lhs;
806 element->hash = avail_expr_hash (element);
809 /* Remove the expressions made available by the current block from the
810 global AVAIL_EXPRS table, popping entries off AVAIL_EXPRS_STACK until
811 we hit this block's marker. */
812 static void
813 remove_local_expressions_from_table (void)
815 /* Remove all the expressions made available in this block. */
816 while (VARRAY_ACTIVE_SIZE (avail_exprs_stack) > 0)
818 struct expr_hash_elt element;
819 tree expr = VARRAY_TOP_TREE (avail_exprs_stack);
820 VARRAY_POP (avail_exprs_stack);
822 if (expr == NULL_TREE)
823 break;
825 initialize_hash_element (expr, NULL, &element);
826 htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
830 /* Use the SSA_NAMES in LOCALS to restore TABLE to its original
831 state, stopping when there are LIMIT entries left in LOCALS. */
833 static void
834 restore_nonzero_vars_to_original_value (varray_type locals,
835 unsigned limit,
836 bitmap table)
838 if (!locals)
839 return;
841 while (VARRAY_ACTIVE_SIZE (locals) > limit)
843 tree name = VARRAY_TOP_TREE (locals);
844 VARRAY_POP (locals);
845 bitmap_clear_bit (table, SSA_NAME_VERSION (name));
849 /* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
850 CONST_AND_COPIES to its original state, stopping when we hit a
851 NULL marker. */
853 static void
854 restore_vars_to_original_value (void)
856 while (VARRAY_ACTIVE_SIZE (const_and_copies_stack) > 0)
858 tree prev_value, dest;
860 dest = VARRAY_TOP_TREE (const_and_copies_stack);
861 VARRAY_POP (const_and_copies_stack);
863 if (dest == NULL)
864 break;
866 prev_value = VARRAY_TOP_TREE (const_and_copies_stack);
867 VARRAY_POP (const_and_copies_stack);
869 set_value_for (dest, prev_value, const_and_copies);
873 /* Similar to restore_vars_to_original_value, except that it restores
874 CURRDEFS to its original value. */
875 static void
876 restore_currdefs_to_original_value (void)
878 /* Restore CURRDEFS to its original state. */
879 while (VARRAY_ACTIVE_SIZE (block_defs_stack) > 0)
881 tree tmp = VARRAY_TOP_TREE (block_defs_stack);
882 tree saved_def, var;
884 VARRAY_POP (block_defs_stack);
886 if (tmp == NULL_TREE)
887 break;
889 /* If we recorded an SSA_NAME, then make the SSA_NAME the current
890 definition of its underlying variable. If we recorded anything
891 else, it must have been a _DECL node and its current reaching
892 definition must have been NULL. */
893 if (TREE_CODE (tmp) == SSA_NAME)
895 saved_def = tmp;
896 var = SSA_NAME_VAR (saved_def);
898 else
900 saved_def = NULL;
901 var = tmp;
904 var_ann (var)->current_def = saved_def;
908 /* We have finished processing the dominator children of BB, perform
909 any finalization actions in preparation for leaving this node in
910 the dominator tree. */
912 static void
913 dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
915 struct dom_walk_block_data *bd
916 = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
917 tree last;
919 /* If we are at a leaf node in the dominator graph, see if we can thread
920 the edge from BB through its successor.
922 Do this before we remove entries from our equivalence tables. */
923 if (bb->succ
924 && ! bb->succ->succ_next
925 && (bb->succ->flags & EDGE_ABNORMAL) == 0
926 && (get_immediate_dominator (CDI_DOMINATORS, bb->succ->dest) != bb
927 || phi_nodes (bb->succ->dest)))
930 thread_across_edge (walk_data, bb->succ);
932 else if ((last = last_stmt (bb))
933 && TREE_CODE (last) == COND_EXPR
934 && (COMPARISON_CLASS_P (COND_EXPR_COND (last))
935 || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
936 && bb->succ
937 && (bb->succ->flags & EDGE_ABNORMAL) == 0
938 && bb->succ->succ_next
939 && (bb->succ->succ_next->flags & EDGE_ABNORMAL) == 0
940 && ! bb->succ->succ_next->succ_next)
942 edge true_edge, false_edge;
943 tree cond, inverted = NULL;
944 enum tree_code cond_code;
946 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
948 cond = COND_EXPR_COND (last);
949 cond_code = TREE_CODE (cond);
951 if (TREE_CODE_CLASS (cond_code) == tcc_comparison)
952 inverted = invert_truthvalue (cond);
954 /* If the THEN arm is the end of a dominator tree or has PHI nodes,
955 then try to thread through its edge. */
956 if (get_immediate_dominator (CDI_DOMINATORS, true_edge->dest) != bb
957 || phi_nodes (true_edge->dest))
959 /* Push a marker onto the available expression stack so that we
960 unwind any expressions related to the TRUE arm before processing
961 the false arm below. */
962 VARRAY_PUSH_TREE (avail_exprs_stack, NULL_TREE);
963 VARRAY_PUSH_TREE (block_defs_stack, NULL_TREE);
964 VARRAY_PUSH_TREE (const_and_copies_stack, NULL_TREE);
966 /* Record any equivalences created by following this edge. */
967 if (TREE_CODE_CLASS (cond_code) == tcc_comparison)
969 record_cond (cond, boolean_true_node);
970 record_dominating_conditions (cond);
971 record_cond (inverted, boolean_false_node);
973 else if (cond_code == SSA_NAME)
974 record_const_or_copy (cond, boolean_true_node);
976 /* Now thread the edge. */
977 thread_across_edge (walk_data, true_edge);
979 /* And restore the various tables to their state before
980 we threaded this edge. */
981 remove_local_expressions_from_table ();
982 restore_vars_to_original_value ();
983 restore_currdefs_to_original_value ();
986 /* Similarly for the ELSE arm. */
987 if (get_immediate_dominator (CDI_DOMINATORS, false_edge->dest) != bb
988 || phi_nodes (false_edge->dest))
990 /* Record any equivalences created by following this edge. */
991 if (TREE_CODE_CLASS (cond_code) == tcc_comparison)
993 record_cond (cond, boolean_false_node);
994 record_cond (inverted, boolean_true_node);
995 record_dominating_conditions (inverted);
997 else if (cond_code == SSA_NAME)
998 record_const_or_copy (cond, boolean_false_node);
1000 thread_across_edge (walk_data, false_edge);
1002 /* No need to remove local expressions from our tables
1003 or restore vars to their original value as that will
1004 be done immediately below. */
1008 remove_local_expressions_from_table ();
1009 restore_nonzero_vars_to_original_value (bd->nonzero_vars, 0, nonzero_vars);
1010 restore_vars_to_original_value ();
1011 restore_currdefs_to_original_value ();
1013 /* Remove VRP records associated with this basic block. They are no
1014 longer valid.
1016 To be efficient, we note which variables have had their values
1017 constrained in this block. So walk over each variable in the
1018 VRP_VARIABLES array. */
1019 while (bd->vrp_variables && VARRAY_ACTIVE_SIZE (bd->vrp_variables) > 0)
1021 tree var = VARRAY_TOP_TREE (bd->vrp_variables);
1023 /* Each variable has a stack of value range records. We want to
1024 invalidate those associated with our basic block. So we walk
1025 the array backwards popping off records associated with our
1026 block. Once we hit a record not associated with our block
1027 we are done. */
1028 varray_type var_vrp_records = VARRAY_GENERIC_PTR (vrp_data,
1029 SSA_NAME_VERSION (var));
1031 while (VARRAY_ACTIVE_SIZE (var_vrp_records) > 0)
1033 struct vrp_element *element
1034 = (struct vrp_element *)VARRAY_TOP_GENERIC_PTR (var_vrp_records);
1036 if (element->bb != bb)
1037 break;
1039 VARRAY_POP (var_vrp_records);
1042 VARRAY_POP (bd->vrp_variables);
1045 /* If we queued any statements to rescan in this block, then
1046 go ahead and rescan them now. */
1047 while (VARRAY_ACTIVE_SIZE (stmts_to_rescan) > 0)
1049 tree stmt = VARRAY_TOP_TREE (stmts_to_rescan);
1050 basic_block stmt_bb = bb_for_stmt (stmt);
1052 if (stmt_bb != bb)
1053 break;
1055 VARRAY_POP (stmts_to_rescan);
1056 mark_new_vars_to_rename (stmt, vars_to_rename);
1060 /* PHI nodes can create equivalences too.
1062 Ignoring any alternatives which are the same as the result, if
1063 all the alternatives are equal, then the PHI node creates an
1064 equivalence.
1066 Additionally, if all the PHI alternatives are known to have a nonzero
1067 value, then the result of this PHI is known to have a nonzero value,
1068 even if we do not know its exact value. */
1070 static void
1071 record_equivalences_from_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
1072 basic_block bb)
1074 tree phi;
1076 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1078 tree lhs = PHI_RESULT (phi);
1079 tree rhs = NULL;
1080 int i;
1082 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1084 tree t = PHI_ARG_DEF (phi, i);
1086 if (TREE_CODE (t) == SSA_NAME || is_gimple_min_invariant (t))
1088 /* Ignore alternatives which are the same as our LHS. */
1089 if (operand_equal_p (lhs, t, 0))
1090 continue;
1092 /* If we have not processed an alternative yet, then set
1093 RHS to this alternative. */
1094 if (rhs == NULL)
1095 rhs = t;
1096 /* If we have processed an alternative (stored in RHS), then
1097 see if it is equal to this one. If it isn't, then stop
1098 the search. */
1099 else if (! operand_equal_p (rhs, t, 0))
1100 break;
1102 else
1103 break;
1106 /* If we had no interesting alternatives, then all the RHS alternatives
1107 must have been the same as LHS. */
1108 if (!rhs)
1109 rhs = lhs;
1111 /* If we managed to iterate through each PHI alternative without
1112 breaking out of the loop, then we have a PHI which may create
1113 a useful equivalence. We do not need to record unwind data for
1114 this, since this is a true assignment and not an equivalence
1115 inferred from a comparison. All uses of this ssa name are dominated
1116 by this assignment, so unwinding just costs time and space. */
1117 if (i == PHI_NUM_ARGS (phi)
1118 && may_propagate_copy (lhs, rhs))
1119 set_value_for (lhs, rhs, const_and_copies);
1121 /* Now see if we know anything about the nonzero property for the
1122 result of this PHI. */
1123 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1125 if (!PHI_ARG_NONZERO (phi, i))
1126 break;
1129 if (i == PHI_NUM_ARGS (phi))
1130 bitmap_set_bit (nonzero_vars, SSA_NAME_VERSION (PHI_RESULT (phi)));
1132 register_new_def (lhs, &block_defs_stack);
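/* Illustrative sketch, not part of this pass: a hypothetical example of
   the two PHI facts recorded above.  A PHI whose arguments are all equal
   (ignoring arguments equal to its own result) is just a copy of that
   argument, and a PHI all of whose arguments are known nonzero produces
   a nonzero result even though its exact value is unknown.  Guarded out
   so it is never compiled.  */
#if 0
#include <assert.h>

/* "x = c ? y : y" produces the degenerate PHI x_3 = PHI <y_2, y_2>,
   so x_3 is equivalent to y_2 on every path.  */
static int
degenerate_phi (int c, int y)
{
  int x;
  if (c)
    x = y;
  else
    x = y;
  return x;		/* always equals y */
}

int
main (void)
{
  assert (degenerate_phi (0, 7) == 7);
  assert (degenerate_phi (1, 7) == 7);

  /* Nonzero property: both PHI arguments below are nonzero constants,
     so the merged value is nonzero whatever the predicate was.  */
  for (int c = 0; c <= 1; c++)
    {
      int v = c ? 3 : -5;
      assert (v != 0);
    }
  return 0;
}
#endif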
1136 /* Ignoring loop backedges, if BB has precisely one incoming edge then
1137 return that edge. Otherwise return NULL. */
1138 static edge
1139 single_incoming_edge_ignoring_loop_edges (basic_block bb)
1141 edge retval = NULL;
1142 edge e;
1144 for (e = bb->pred; e; e = e->pred_next)
1146 /* A loop back edge can be identified by the destination of
1147 the edge dominating the source of the edge. */
1148 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
1149 continue;
1151 /* If we have already seen a non-loop edge, then we must have
1152 multiple incoming non-loop edges and thus we return NULL. */
1153 if (retval)
1154 return NULL;
1156 /* This is the first non-loop incoming edge we have found. Record
1157 it. */
1158 retval = e;
1161 return retval;
1164 /* Record any equivalences created by the incoming edge to BB. If BB
1165 has more than one incoming edge, then no equivalence is created. */
1167 static void
1168 record_equivalences_from_incoming_edge (struct dom_walk_data *walk_data,
1169 basic_block bb)
1171 int edge_flags;
1172 basic_block parent;
1173 struct eq_expr_value eq_expr_value;
1174 tree parent_block_last_stmt = NULL;
1175 struct dom_walk_block_data *bd
1176 = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
1178 /* If our parent block ended with a control statement, then we may be
1179 able to record some equivalences based on which outgoing edge from
1180 the parent was followed. */
1181 parent = get_immediate_dominator (CDI_DOMINATORS, bb);
1182 if (parent)
1184 parent_block_last_stmt = last_stmt (parent);
1185 if (parent_block_last_stmt && !is_ctrl_stmt (parent_block_last_stmt))
1186 parent_block_last_stmt = NULL;
1189 eq_expr_value.src = NULL;
1190 eq_expr_value.dst = NULL;
1192 /* If we have a single predecessor (ignoring loop backedges), then extract
1193 EDGE_FLAGS from the single incoming edge. Otherwise just return as
1194 there is nothing to do. */
1195 if (bb->pred
1196 && parent_block_last_stmt)
1198 edge e = single_incoming_edge_ignoring_loop_edges (bb);
1199 if (e && bb_for_stmt (parent_block_last_stmt) == e->src)
1200 edge_flags = e->flags;
1201 else
1202 return;
1204 else
1205 return;
1207 /* If our parent block ended in a COND_EXPR, add any equivalences
1208 created by the COND_EXPR to the hash table and initialize
1209 EQ_EXPR_VALUE appropriately.
1211 EQ_EXPR_VALUE is an assignment expression created when BB's immediate
1212 dominator ends in a COND_EXPR statement whose predicate is of the form
1213 'VAR == VALUE', where VALUE may be another variable or a constant.
1214 This is used to propagate VALUE on the THEN_CLAUSE of that
1215 conditional. This assignment is inserted in CONST_AND_COPIES so that
1216 the copy and constant propagator can find more propagation
1217 opportunities. */
1218 if (TREE_CODE (parent_block_last_stmt) == COND_EXPR
1219 && (edge_flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
1220 eq_expr_value = get_eq_expr_value (parent_block_last_stmt,
1221 (edge_flags & EDGE_TRUE_VALUE) != 0,
1223 &bd->vrp_variables);
1224 /* Similarly when the parent block ended in a SWITCH_EXPR.
1225 We can only know the value of the switch's condition if the dominator
1226 parent is also the only predecessor of this block. */
1227 else if (bb->pred->src == parent
1228 && TREE_CODE (parent_block_last_stmt) == SWITCH_EXPR)
1230 tree switch_cond = SWITCH_COND (parent_block_last_stmt);
1232 /* If the switch's condition is an SSA variable, then we may
1233 know its value at each of the case labels. */
1234 if (TREE_CODE (switch_cond) == SSA_NAME)
1236 tree switch_vec = SWITCH_LABELS (parent_block_last_stmt);
1237 size_t i, n = TREE_VEC_LENGTH (switch_vec);
1238 int case_count = 0;
1239 tree match_case = NULL_TREE;
1241 /* Search the case labels for those whose destination is
1242 the current basic block. */
1243 for (i = 0; i < n; ++i)
1245 tree elt = TREE_VEC_ELT (switch_vec, i);
1246 if (label_to_block (CASE_LABEL (elt)) == bb)
1248 if (++case_count > 1 || CASE_HIGH (elt))
1249 break;
1250 match_case = elt;
1254 /* If we encountered precisely one CASE_LABEL_EXPR and it
1255 was not the default case, or a case range, then we know
1256 the exact value of SWITCH_COND which caused us to get to
1257 this block. Record that equivalence in EQ_EXPR_VALUE. */
1258 if (case_count == 1
1259 && match_case
1260 && CASE_LOW (match_case)
1261 && !CASE_HIGH (match_case))
1263 eq_expr_value.dst = switch_cond;
1264 eq_expr_value.src = fold_convert (TREE_TYPE (switch_cond),
1265 CASE_LOW (match_case));
1270 /* If EQ_EXPR_VALUE (VAR == VALUE) is given, register the VALUE as a
1271 new value for VAR, so that occurrences of VAR can be replaced with
1272 VALUE while re-writing the THEN arm of a COND_EXPR. */
1273 if (eq_expr_value.src && eq_expr_value.dst)
1274 record_equality (eq_expr_value.dst, eq_expr_value.src);
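/* Illustrative sketch, not part of this pass: a hypothetical example of
   the edge equivalences recorded above.  On the true edge of
   "if (x == 5)" the pass records x == 5, and on the edge reaching a
   single, non-range case label of a switch it records the value of the
   switch condition, so uses in the target block fold to constants.
   Guarded out so it is never compiled.  */
#if 0
#include <assert.h>

static int
uses_edge_equivalences (int x)
{
  if (x == 5)
    /* Here x is known to be 5, so "x + 1" folds to 6.  */
    return x + 1;

  switch (x)
    {
    case 2:
      /* This block is reached only through the case label 2, so x is
         known to be 2 and "x * 10" folds to 20.  */
      return x * 10;
    default:
      return 0;
    }
}

int
main (void)
{
  assert (uses_edge_equivalences (5) == 6);
  assert (uses_edge_equivalences (2) == 20);
  assert (uses_edge_equivalences (9) == 0);
  return 0;
}
#endif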
1277 /* Dump SSA statistics on FILE. */
1279 void
1280 dump_dominator_optimization_stats (FILE *file)
1282 long n_exprs;
1284 fprintf (file, "Total number of statements: %6ld\n\n",
1285 opt_stats.num_stmts);
1286 fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
1287 opt_stats.num_exprs_considered);
1289 n_exprs = opt_stats.num_exprs_considered;
1290 if (n_exprs == 0)
1291 n_exprs = 1;
1293 fprintf (file, " Redundant expressions eliminated: %6ld (%.0f%%)\n",
1294 opt_stats.num_re, PERCENT (opt_stats.num_re,
1295 n_exprs));
1297 fprintf (file, "\nHash table statistics:\n");
1299 fprintf (file, " avail_exprs: ");
1300 htab_statistics (file, avail_exprs);
1304 /* Dump SSA statistics on stderr. */
1306 void
1307 debug_dominator_optimization_stats (void)
1309 dump_dominator_optimization_stats (stderr);
1313 /* Dump statistics for the hash table HTAB. */
1315 static void
1316 htab_statistics (FILE *file, htab_t htab)
1318 fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
1319 (long) htab_size (htab),
1320 (long) htab_elements (htab),
1321 htab_collisions (htab));
1324 /* Record the fact that VAR has a nonzero value, though we may not know
1325 its exact value. Note that if VAR is already known to have a nonzero
1326 value, then we do nothing. */
1328 static void
1329 record_var_is_nonzero (tree var, varray_type *block_nonzero_vars_p)
1331 int indx = SSA_NAME_VERSION (var);
1333 if (bitmap_bit_p (nonzero_vars, indx))
1334 return;
1336 /* Mark it in the global table. */
1337 bitmap_set_bit (nonzero_vars, indx);
1339 /* Record this SSA_NAME so that we can reset the global table
1340 when we leave this block. */
1341 if (! *block_nonzero_vars_p)
1342 VARRAY_TREE_INIT (*block_nonzero_vars_p, 2, "block_nonzero_vars");
1343 VARRAY_PUSH_TREE (*block_nonzero_vars_p, var);
1346 /* Enter a statement into the true/false expression hash table indicating
1347 that the condition COND has the value VALUE. */
1349 static void
1350 record_cond (tree cond, tree value)
1352 struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));
1353 void **slot;
1355 initialize_hash_element (cond, value, element);
1357 slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
1358 element->hash, true);
1359 if (*slot == NULL)
1361 *slot = (void *) element;
1362 VARRAY_PUSH_TREE (avail_exprs_stack, cond);
1364 else
1365 free (element);
1368 /* COND is a condition which is known to be true. Record variants of
1369 COND which must also be true.
1371 For example, if a < b is true, then a <= b must also be true. */
1373 static void
1374 record_dominating_conditions (tree cond)
1376 switch (TREE_CODE (cond))
1378 case LT_EXPR:
1379 record_cond (build2 (LE_EXPR, boolean_type_node,
1380 TREE_OPERAND (cond, 0),
1381 TREE_OPERAND (cond, 1)),
1382 boolean_true_node);
1383 record_cond (build2 (ORDERED_EXPR, boolean_type_node,
1384 TREE_OPERAND (cond, 0),
1385 TREE_OPERAND (cond, 1)),
1386 boolean_true_node);
1387 record_cond (build2 (NE_EXPR, boolean_type_node,
1388 TREE_OPERAND (cond, 0),
1389 TREE_OPERAND (cond, 1)),
1390 boolean_true_node);
1391 record_cond (build2 (LTGT_EXPR, boolean_type_node,
1392 TREE_OPERAND (cond, 0),
1393 TREE_OPERAND (cond, 1)),
1394 boolean_true_node);
1395 break;
1397 case GT_EXPR:
1398 record_cond (build2 (GE_EXPR, boolean_type_node,
1399 TREE_OPERAND (cond, 0),
1400 TREE_OPERAND (cond, 1)),
1401 boolean_true_node);
1402 record_cond (build2 (ORDERED_EXPR, boolean_type_node,
1403 TREE_OPERAND (cond, 0),
1404 TREE_OPERAND (cond, 1)),
1405 boolean_true_node);
1406 record_cond (build2 (NE_EXPR, boolean_type_node,
1407 TREE_OPERAND (cond, 0),
1408 TREE_OPERAND (cond, 1)),
1409 boolean_true_node);
1410 record_cond (build2 (LTGT_EXPR, boolean_type_node,
1411 TREE_OPERAND (cond, 0),
1412 TREE_OPERAND (cond, 1)),
1413 boolean_true_node);
1414 break;
1416 case GE_EXPR:
1417 case LE_EXPR:
1418 record_cond (build2 (ORDERED_EXPR, boolean_type_node,
1419 TREE_OPERAND (cond, 0),
1420 TREE_OPERAND (cond, 1)),
1421 boolean_true_node);
1422 break;
1424 case EQ_EXPR:
1425 record_cond (build2 (ORDERED_EXPR, boolean_type_node,
1426 TREE_OPERAND (cond, 0),
1427 TREE_OPERAND (cond, 1)),
1428 boolean_true_node);
1429 record_cond (build2 (LE_EXPR, boolean_type_node,
1430 TREE_OPERAND (cond, 0),
1431 TREE_OPERAND (cond, 1)),
1432 boolean_true_node);
1433 record_cond (build2 (GE_EXPR, boolean_type_node,
1434 TREE_OPERAND (cond, 0),
1435 TREE_OPERAND (cond, 1)),
1436 boolean_true_node);
1437 break;
1439 case UNORDERED_EXPR:
1440 record_cond (build2 (NE_EXPR, boolean_type_node,
1441 TREE_OPERAND (cond, 0),
1442 TREE_OPERAND (cond, 1)),
1443 boolean_true_node);
1444 record_cond (build2 (UNLE_EXPR, boolean_type_node,
1445 TREE_OPERAND (cond, 0),
1446 TREE_OPERAND (cond, 1)),
1447 boolean_true_node);
1448 record_cond (build2 (UNGE_EXPR, boolean_type_node,
1449 TREE_OPERAND (cond, 0),
1450 TREE_OPERAND (cond, 1)),
1451 boolean_true_node);
1452 record_cond (build2 (UNEQ_EXPR, boolean_type_node,
1453 TREE_OPERAND (cond, 0),
1454 TREE_OPERAND (cond, 1)),
1455 boolean_true_node);
1456 record_cond (build2 (UNLT_EXPR, boolean_type_node,
1457 TREE_OPERAND (cond, 0),
1458 TREE_OPERAND (cond, 1)),
1459 boolean_true_node);
1460 record_cond (build2 (UNGT_EXPR, boolean_type_node,
1461 TREE_OPERAND (cond, 0),
1462 TREE_OPERAND (cond, 1)),
1463 boolean_true_node);
1464 break;
1466 case UNLT_EXPR:
1467 record_cond (build2 (UNLE_EXPR, boolean_type_node,
1468 TREE_OPERAND (cond, 0),
1469 TREE_OPERAND (cond, 1)),
1470 boolean_true_node);
1471 record_cond (build2 (NE_EXPR, boolean_type_node,
1472 TREE_OPERAND (cond, 0),
1473 TREE_OPERAND (cond, 1)),
1474 boolean_true_node);
1475 break;
1477 case UNGT_EXPR:
1478 record_cond (build2 (UNGE_EXPR, boolean_type_node,
1479 TREE_OPERAND (cond, 0),
1480 TREE_OPERAND (cond, 1)),
1481 boolean_true_node);
1482 record_cond (build2 (NE_EXPR, boolean_type_node,
1483 TREE_OPERAND (cond, 0),
1484 TREE_OPERAND (cond, 1)),
1485 boolean_true_node);
1486 break;
1488 case UNEQ_EXPR:
1489 record_cond (build2 (UNLE_EXPR, boolean_type_node,
1490 TREE_OPERAND (cond, 0),
1491 TREE_OPERAND (cond, 1)),
1492 boolean_true_node);
1493 record_cond (build2 (UNGE_EXPR, boolean_type_node,
1494 TREE_OPERAND (cond, 0),
1495 TREE_OPERAND (cond, 1)),
1496 boolean_true_node);
1497 break;
1499 case LTGT_EXPR:
1500 record_cond (build2 (NE_EXPR, boolean_type_node,
1501 TREE_OPERAND (cond, 0),
1502 TREE_OPERAND (cond, 1)),
1503 boolean_true_node);
1504 record_cond (build2 (ORDERED_EXPR, boolean_type_node,
1505 TREE_OPERAND (cond, 0),
1506 TREE_OPERAND (cond, 1)),
1507 boolean_true_node);
1509 default:
1510 break;
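/* Illustrative sketch, not part of this pass: a hypothetical check of the
   implications recorded above for floating point.  If a < b is true then
   a and b are ordered (neither is a NaN), so a <= b and a != b must also
   be true, while a false comparison implies nothing.  Requires IEEE
   semantics (no -ffast-math).  Guarded out so it is never compiled.  */
#if 0
#include <assert.h>
#include <math.h>

static void
check_pair (double a, double b)
{
  if (a < b)
    {
      assert (!isnan (a) && !isnan (b));   /* ORDERED_EXPR holds */
      assert (a <= b);                     /* LE_EXPR holds */
      assert (a != b);                     /* NE_EXPR holds */
    }
}

int
main (void)
{
  check_pair (1.0, 2.0);
  check_pair (2.0, 1.0);
  check_pair (NAN, 1.0);   /* a < b is false; nothing is implied */
  return 0;
}
#endif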
1514 /* A helper function for record_const_or_copy and record_equality.
1515 Do the work of recording the value and undo info. */
1517 static void
1518 record_const_or_copy_1 (tree x, tree y, tree prev_x)
1520 set_value_for (x, y, const_and_copies);
1522 VARRAY_PUSH_TREE (const_and_copies_stack, prev_x);
1523 VARRAY_PUSH_TREE (const_and_copies_stack, x);
1526 /* Record that X is equal to Y in const_and_copies. Record undo
1527 information in the block-local varray. */
1529 static void
1530 record_const_or_copy (tree x, tree y)
1532 tree prev_x = get_value_for (x, const_and_copies);
1534 if (TREE_CODE (y) == SSA_NAME)
1536 tree tmp = get_value_for (y, const_and_copies);
1537 if (tmp)
1538 y = tmp;
1541 record_const_or_copy_1 (x, y, prev_x);
1544 /* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
1545 This constrains the cases in which we may treat this as an assignment. */
1547 static void
1548 record_equality (tree x, tree y)
1550 tree prev_x = NULL, prev_y = NULL;
1552 if (TREE_CODE (x) == SSA_NAME)
1553 prev_x = get_value_for (x, const_and_copies);
1554 if (TREE_CODE (y) == SSA_NAME)
1555 prev_y = get_value_for (y, const_and_copies);
1557 /* If one of the previous values is invariant, then use that.
1558 Otherwise it doesn't matter which value we choose, just so
1559 long as we canonicalize on one value. */
1560 if (TREE_INVARIANT (y))
1562 else if (TREE_INVARIANT (x))
1563 prev_x = x, x = y, y = prev_x, prev_x = prev_y;
1564 else if (prev_x && TREE_INVARIANT (prev_x))
1565 x = y, y = prev_x, prev_x = prev_y;
1566 else if (prev_y)
1567 y = prev_y;
1569 /* After the swapping, we must have one SSA_NAME. */
1570 if (TREE_CODE (x) != SSA_NAME)
1571 return;
1573 /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
1574 variable compared against zero. If we're honoring signed zeros,
1575 then we cannot record this value unless we know that the value is
1576 nonzero. */
1577 if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
1578 && (TREE_CODE (y) != REAL_CST
1579 || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
1580 return;
1582 record_const_or_copy_1 (x, y, prev_x);
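/* Illustrative sketch, not part of this pass: a hypothetical example of
   why record_equality refuses to record an equivalence from a comparison
   against zero when signed zeros are honored.  The comparison cannot
   tell -0.0 from +0.0, yet the two values are distinguishable, so
   substituting the literal 0.0 for x could change the program.  Requires
   IEEE semantics (no -ffast-math).  Guarded out so it is never
   compiled.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double neg_zero = -0.0;

  /* The test that would have suggested the equivalence "x = 0.0".  */
  assert (neg_zero == 0.0);

  /* ...but the two "equal" values behave differently.  */
  assert (signbit (neg_zero) != 0);
  assert (signbit (0.0) == 0);
  assert (copysign (1.0, neg_zero) == -1.0);
  assert (copysign (1.0, 0.0) == 1.0);
  return 0;
}
#endif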
1585 /* STMT is a MODIFY_EXPR for which we were unable to find RHS in the
1586 hash tables. Try to simplify the RHS using whatever equivalences
1587 we may have recorded.
1589 If we are able to simplify the RHS, then lookup the simplified form in
1590 the hash table and return the result. Otherwise return NULL. */
1592 static tree
1593 simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *walk_data,
1594 tree stmt, int insert)
1596 tree rhs = TREE_OPERAND (stmt, 1);
1597 enum tree_code rhs_code = TREE_CODE (rhs);
1598 tree result = NULL;
1600 /* If we have lhs = ~x, look and see if we earlier had x = ~y,
1601 in which case we can change this statement to lhs = y, which
1602 can then be copy propagated.
1604 Similarly for negation. */
1605 if ((rhs_code == BIT_NOT_EXPR || rhs_code == NEGATE_EXPR)
1606 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1608 /* Get the definition statement for our RHS. */
1609 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
1611 /* See if the RHS_DEF_STMT has the same form as our statement. */
1612 if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR
1613 && TREE_CODE (TREE_OPERAND (rhs_def_stmt, 1)) == rhs_code)
1615 tree rhs_def_operand;
1617 rhs_def_operand = TREE_OPERAND (TREE_OPERAND (rhs_def_stmt, 1), 0);
1619 /* Verify that RHS_DEF_OPERAND is a suitable SSA variable. */
1620 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1621 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1622 result = update_rhs_and_lookup_avail_expr (stmt,
1623 rhs_def_operand,
1624 insert);
1628 /* If we have z = (x OP C1), see if we earlier had x = y OP C2.
1629 If OP is associative, create and fold (y OP C2) OP C1, which
1630 should result in (y OP C3); use that as the RHS for the
1631 assignment. MINUS_EXPR is also accepted, as it is handled specially below. */
1632 if ((associative_tree_code (rhs_code) || rhs_code == MINUS_EXPR)
1633 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
1634 && is_gimple_min_invariant (TREE_OPERAND (rhs, 1)))
1636 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
1638 /* See if the RHS_DEF_STMT has the same form as our statement. */
1639 if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR)
1641 tree rhs_def_rhs = TREE_OPERAND (rhs_def_stmt, 1);
1642 enum tree_code rhs_def_code = TREE_CODE (rhs_def_rhs);
1644 if (rhs_code == rhs_def_code
1645 || (rhs_code == PLUS_EXPR && rhs_def_code == MINUS_EXPR)
1646 || (rhs_code == MINUS_EXPR && rhs_def_code == PLUS_EXPR))
1648 tree def_stmt_op0 = TREE_OPERAND (rhs_def_rhs, 0);
1649 tree def_stmt_op1 = TREE_OPERAND (rhs_def_rhs, 1);
1651 if (TREE_CODE (def_stmt_op0) == SSA_NAME
1652 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_stmt_op0)
1653 && is_gimple_min_invariant (def_stmt_op1))
1655 tree outer_const = TREE_OPERAND (rhs, 1);
1656 tree type = TREE_TYPE (TREE_OPERAND (stmt, 0));
1657 tree t;
1659 /* If we care about correct floating point results, then
1660 don't fold x + c1 - c2. Note that we need to take both
1661 the codes and the signs into account to figure this out. */
1662 if (FLOAT_TYPE_P (type)
1663 && !flag_unsafe_math_optimizations
1664 && (rhs_def_code == PLUS_EXPR
1665 || rhs_def_code == MINUS_EXPR))
1667 bool neg = false;
1669 neg ^= (rhs_code == MINUS_EXPR);
1670 neg ^= (rhs_def_code == MINUS_EXPR);
1671 neg ^= real_isneg (TREE_REAL_CST_PTR (outer_const));
1672 neg ^= real_isneg (TREE_REAL_CST_PTR (def_stmt_op1));
1674 if (neg)
1675 goto dont_fold_assoc;
1678 /* Ho hum. So fold will only operate on the outermost
1679 thingy that we give it, so we have to build the new
1680 expression in two pieces. This requires that we handle
1681 combinations of plus and minus. */
1682 if (rhs_def_code != rhs_code)
1684 if (rhs_def_code == MINUS_EXPR)
1685 t = build (MINUS_EXPR, type, outer_const, def_stmt_op1);
1686 else
1687 t = build (MINUS_EXPR, type, def_stmt_op1, outer_const);
1688 rhs_code = PLUS_EXPR;
1690 else if (rhs_def_code == MINUS_EXPR)
1691 t = build (PLUS_EXPR, type, def_stmt_op1, outer_const);
1692 else
1693 t = build (rhs_def_code, type, def_stmt_op1, outer_const);
1694 t = local_fold (t);
1695 t = build (rhs_code, type, def_stmt_op0, t);
1696 t = local_fold (t);
1698 /* If the result is a suitable looking gimple expression,
1699 then use it instead of the original for STMT. */
1700 if (TREE_CODE (t) == SSA_NAME
1701 || (UNARY_CLASS_P (t)
1702 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
1703 || ((BINARY_CLASS_P (t) || COMPARISON_CLASS_P (t))
1704 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
1705 && is_gimple_val (TREE_OPERAND (t, 1))))
1706 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1710 dont_fold_assoc:;
1713 /* Transform TRUNC_DIV_EXPR and TRUNC_MOD_EXPR into RSHIFT_EXPR
1714 and BIT_AND_EXPR respectively if the first operand is greater
1715 than zero and the second operand is an exact power of two. */
1716 if ((rhs_code == TRUNC_DIV_EXPR || rhs_code == TRUNC_MOD_EXPR)
1717 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
1718 && integer_pow2p (TREE_OPERAND (rhs, 1)))
1720 tree val;
1721 tree op = TREE_OPERAND (rhs, 0);
1723 if (TYPE_UNSIGNED (TREE_TYPE (op)))
1725 val = integer_one_node;
1727 else
1729 tree dummy_cond = walk_data->global_data;
1731 if (! dummy_cond)
1733 dummy_cond = build (GT_EXPR, boolean_type_node,
1734 op, integer_zero_node);
1735 dummy_cond = build (COND_EXPR, void_type_node,
1736 dummy_cond, NULL, NULL);
1737 walk_data->global_data = dummy_cond;
1739 else
1741 TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), GT_EXPR);
1742 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
1743 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
1744 = integer_zero_node;
1746 val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
1749 if (val && integer_onep (val))
1751 tree t;
1752 tree op0 = TREE_OPERAND (rhs, 0);
1753 tree op1 = TREE_OPERAND (rhs, 1);
1755 if (rhs_code == TRUNC_DIV_EXPR)
1756 t = build (RSHIFT_EXPR, TREE_TYPE (op0), op0,
1757 build_int_cst (NULL_TREE, tree_log2 (op1)));
1758 else
1759 t = build (BIT_AND_EXPR, TREE_TYPE (op0), op0,
1760 local_fold (build (MINUS_EXPR, TREE_TYPE (op1),
1761 op1, integer_one_node)));
1763 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1767 /* Transform ABS (X) into X or -X as appropriate. */
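/* For example, if earlier conditionals prove X <= 0 then ABS (X)
   becomes -X, and if they prove X >= 0 (or X is unsigned) then
   ABS (X) becomes X. */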
1768 if (rhs_code == ABS_EXPR
1769 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0))))
1771 tree val;
1772 tree op = TREE_OPERAND (rhs, 0);
1773 tree type = TREE_TYPE (op);
1775 if (TYPE_UNSIGNED (type))
1777 val = integer_zero_node;
1779 else
1781 tree dummy_cond = walk_data->global_data;
1783 if (! dummy_cond)
1785 dummy_cond = build (LE_EXPR, boolean_type_node,
1786 op, integer_zero_node);
1787 dummy_cond = build (COND_EXPR, void_type_node,
1788 dummy_cond, NULL, NULL);
1789 walk_data->global_data = dummy_cond;
1791 else
1793 TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), LE_EXPR);
1794 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
1795 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
1796 = build_int_cst (type, 0);
1798 val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
1800 if (!val)
1802 TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), GE_EXPR);
1803 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
1804 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
1805 = build_int_cst (type, 0);
1807 val = simplify_cond_and_lookup_avail_expr (dummy_cond,
1808 NULL, false);
1810 if (val)
1812 if (integer_zerop (val))
1813 val = integer_one_node;
1814 else if (integer_onep (val))
1815 val = integer_zero_node;
1820 if (val
1821 && (integer_onep (val) || integer_zerop (val)))
1823 tree t;
1825 if (integer_onep (val))
1826 t = build1 (NEGATE_EXPR, TREE_TYPE (op), op);
1827 else
1828 t = op;
1830 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1834 /* Optimize *"foo" into 'f'. This is done here rather than
1835 in fold to avoid problems with stuff like &*"foo". */
1836 if (TREE_CODE (rhs) == INDIRECT_REF || TREE_CODE (rhs) == ARRAY_REF)
1838 tree t = fold_read_from_constant_string (rhs);
1840 if (t)
1841 result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1844 return result;
1847 /* COND is a condition of the form:
1849 x == const or x != const
1851 Look back to x's defining statement and see if x is defined as
1853 x = (type) y;
1855 If const is unchanged if we convert it to type, then we can build
1856 the equivalent expression:
1859 y == const or y != const
1861 Which may allow further optimizations.
1863 Return the equivalent comparison or NULL if no such equivalent comparison
1864 was found. */
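/* For example, given a char variable C and the comparison
   (int) C == 32, the constant 32 is unchanged when converted to
   char, so the comparison can be rewritten as C == 32. */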
1866 static tree
1867 find_equivalent_equality_comparison (tree cond)
1869 tree op0 = TREE_OPERAND (cond, 0);
1870 tree op1 = TREE_OPERAND (cond, 1);
1871 tree def_stmt = SSA_NAME_DEF_STMT (op0);
1873 /* OP0 might have been a parameter, so first make sure it
1874 was defined by a MODIFY_EXPR. */
1875 if (def_stmt && TREE_CODE (def_stmt) == MODIFY_EXPR)
1877 tree def_rhs = TREE_OPERAND (def_stmt, 1);
1879 /* Now make sure the RHS of the MODIFY_EXPR is a typecast. */
1880 if ((TREE_CODE (def_rhs) == NOP_EXPR
1881 || TREE_CODE (def_rhs) == CONVERT_EXPR)
1882 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME)
1884 tree def_rhs_inner = TREE_OPERAND (def_rhs, 0);
1885 tree def_rhs_inner_type = TREE_TYPE (def_rhs_inner);
1886 tree new;
1888 if (TYPE_PRECISION (def_rhs_inner_type)
1889 > TYPE_PRECISION (TREE_TYPE (def_rhs)))
1890 return NULL;
1892 /* What we want to prove is that if we convert OP1 to
1893 the type of the object inside the NOP_EXPR, the
1894 result is still equivalent to OP1.
1896 If that is true, then build and return a new equivalent
1897 condition which uses the source of the typecast and the
1898 new constant (which has only changed its type). */
1899 new = build1 (TREE_CODE (def_rhs), def_rhs_inner_type, op1);
1900 new = local_fold (new);
1901 if (is_gimple_val (new) && tree_int_cst_equal (new, op1))
1902 return build (TREE_CODE (cond), TREE_TYPE (cond),
1903 def_rhs_inner, new);
1906 return NULL;
1909 /* STMT is a COND_EXPR for which we could not trivially determine its
1910 result. This routine attempts to find equivalent forms of the
1911 condition which we may be able to optimize better. It also
1912 uses simple value range propagation to optimize conditionals. */
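/* For example, if an earlier conditional on the current path
   established X > 10, the recorded range [11, MAX] allows a later
   test X > 5 to be folded to true and X < 3 to false. */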
1914 static tree
1915 simplify_cond_and_lookup_avail_expr (tree stmt,
1916 stmt_ann_t ann,
1917 int insert)
1919 tree cond = COND_EXPR_COND (stmt);
1921 if (COMPARISON_CLASS_P (cond))
1923 tree op0 = TREE_OPERAND (cond, 0);
1924 tree op1 = TREE_OPERAND (cond, 1);
1926 if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
1928 int limit;
1929 tree low, high, cond_low, cond_high;
1930 int lowequal, highequal, swapped, no_overlap, subset, cond_inverted;
1931 varray_type vrp_records;
1932 struct vrp_element *element;
1934 /* First see if we have test of an SSA_NAME against a constant
1935 where the SSA_NAME is defined by an earlier typecast which
1936 is irrelevant when performing tests against the given
1937 constant. */
1938 if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1940 tree new_cond = find_equivalent_equality_comparison (cond);
1942 if (new_cond)
1944 /* Update the statement to use the new equivalent
1945 condition. */
1946 COND_EXPR_COND (stmt) = new_cond;
1948 /* If this is not a real stmt, ann will be NULL and we
1949 avoid processing the operands. */
1950 if (ann)
1951 modify_stmt (stmt);
1953 /* Lookup the condition and return its known value if it
1954 exists. */
1955 new_cond = lookup_avail_expr (stmt, insert);
1956 if (new_cond)
1957 return new_cond;
1959 /* The operands have changed, so update op0 and op1. */
1960 op0 = TREE_OPERAND (cond, 0);
1961 op1 = TREE_OPERAND (cond, 1);
1965 /* Consult the value range records for this variable (if they exist)
1966 to see if we can eliminate or simplify this conditional.
1968 Note two tests are necessary to determine that no records exist.
1969 First we have to see if the virtual array exists; if it
1970 exists, then we have to check its active size.
1972 Also note the vast majority of conditionals are not testing
1973 a variable which has had its range constrained by an earlier
1974 conditional. So this filter avoids a lot of unnecessary work. */
1975 vrp_records = VARRAY_GENERIC_PTR (vrp_data, SSA_NAME_VERSION (op0));
1976 if (vrp_records == NULL)
1977 return NULL;
1979 limit = VARRAY_ACTIVE_SIZE (vrp_records);
1981 /* If we have no value range records for this variable, or we are
1982 unable to extract a range for this condition, then there is
1983 nothing to do. */
1984 if (limit == 0
1985 || ! extract_range_from_cond (cond, &cond_high,
1986 &cond_low, &cond_inverted))
1987 return NULL;
1989 /* We really want to avoid unnecessary computations of range
1990 info. So all ranges are computed lazily; this avoids a
1991 lot of unnecessary work. That is, we record the conditional,
1992 but do not process how it constrains the variable's
1993 potential values until we know that processing the condition
1994 could be helpful.
1996 However, we do not want to have to walk a potentially long
1997 list of ranges, nor do we want to compute a variable's
1998 range more than once for a given path.
2000 Luckily, each time we encounter a conditional that can not
2001 be otherwise optimized we will end up here and we will
2002 compute the necessary range information for the variable
2003 used in this condition.
2005 Thus you can conclude that there will never be more than one
2006 conditional associated with a variable which has not been
2007 processed. So we never need to merge more than one new
2008 conditional into the current range.
2010 These properties also help us avoid unnecessary work. */
2011 element
2012 = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records, limit - 1);
2014 if (element->high && element->low)
2016 /* The last element has been processed, so there is no range
2017 merging to do, we can simply use the high/low values
2018 recorded in the last element. */
2019 low = element->low;
2020 high = element->high;
2022 else
2024 tree tmp_high, tmp_low;
2025 int dummy;
2027 /* The last element has not been processed. Process it now. */
2028 extract_range_from_cond (element->cond, &tmp_high,
2029 &tmp_low, &dummy);
2031 /* If this is the only element, then no merging is necessary,
2032 the high/low values from extract_range_from_cond are all
2033 we need. */
2034 if (limit == 1)
2036 low = tmp_low;
2037 high = tmp_high;
2039 else
2041 /* Get the high/low value from the previous element. */
2042 struct vrp_element *prev
2043 = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records,
2044 limit - 2);
2045 low = prev->low;
2046 high = prev->high;
2048 /* Merge in this element's range with the range from the
2049 previous element.
2051 The low value for the merged range is the maximum of
2052 the previous low value and the low value of this record.
2054 Similarly the high value for the merged range is the
2055 minimum of the previous high value and the high value of
2056 this record. */
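/* For example, merging a previously computed range [0, 100] with
   a new record giving [5, 200] yields the range [5, 100]. */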
2057 low = (tree_int_cst_compare (low, tmp_low) == 1
2058 ? low : tmp_low);
2059 high = (tree_int_cst_compare (high, tmp_high) == -1
2060 ? high : tmp_high);
2063 /* And record the computed range. */
2064 element->low = low;
2065 element->high = high;
2069 /* After we have constrained this variable's potential values,
2070 we try to determine the result of the given conditional.
2072 To simplify later tests, first determine if the current
2073 low value is the same low value as the conditional.
2074 Similarly for the current high value and the high value
2075 for the conditional. */
2076 lowequal = tree_int_cst_equal (low, cond_low);
2077 highequal = tree_int_cst_equal (high, cond_high);
2079 if (lowequal && highequal)
2080 return (cond_inverted ? boolean_false_node : boolean_true_node);
2082 /* To simplify the overlap/subset tests below we may want
2083 to swap the two ranges so that the larger of the two
2084 ranges occurs "first". */
2085 swapped = 0;
2086 if (tree_int_cst_compare (low, cond_low) == 1
2087 || (lowequal
2088 && tree_int_cst_compare (cond_high, high) == 1))
2090 tree temp;
2092 swapped = 1;
2093 temp = low;
2094 low = cond_low;
2095 cond_low = temp;
2096 temp = high;
2097 high = cond_high;
2098 cond_high = temp;
2101 /* Now determine if there is no overlap in the ranges
2102 or if the second range is a subset of the first range. */
2103 no_overlap = tree_int_cst_lt (high, cond_low);
2104 subset = tree_int_cst_compare (cond_high, high) != 1;
2106 /* If there was no overlap in the ranges, then this conditional
2107 always has a false value (unless we had to invert this
2108 conditional, in which case it always has a true value). */
2109 if (no_overlap)
2110 return (cond_inverted ? boolean_true_node : boolean_false_node);
2112 /* If the current range is a subset of the condition's range,
2113 then this conditional always has a true value (unless we
2114 had to invert this conditional, in which case it always
2115 has a false value). */
2116 if (subset && swapped)
2117 return (cond_inverted ? boolean_false_node : boolean_true_node);
2119 /* We were unable to determine the result of the conditional.
2120 However, we may be able to simplify the conditional. First
2121 merge the ranges in the same manner as range merging above. */
2122 low = tree_int_cst_compare (low, cond_low) == 1 ? low : cond_low;
2123 high = tree_int_cst_compare (high, cond_high) == -1 ? high : cond_high;
2125 /* If the range has converged to a single point, then turn this
2126 into an equality comparison. */
2127 if (TREE_CODE (cond) != EQ_EXPR
2128 && TREE_CODE (cond) != NE_EXPR
2129 && tree_int_cst_equal (low, high))
2131 TREE_SET_CODE (cond, EQ_EXPR);
2132 TREE_OPERAND (cond, 1) = high;
2136 return 0;
2139 /* STMT is a SWITCH_EXPR for which we could not trivially determine its
2140 result. This routine attempts to find equivalent forms of the
2141 condition which we may be able to optimize better. */
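/* For example, for an unsigned char variable C, switch ((int) C)
   can be rewritten as switch (C) since the widening cast preserves
   every value of C. */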
2143 static tree
2144 simplify_switch_and_lookup_avail_expr (tree stmt, int insert)
2146 tree cond = SWITCH_COND (stmt);
2147 tree def, to, ti;
2149 /* The optimization that we really care about is removing unnecessary
2150 casts. That will let us do much better in propagating the inferred
2151 constant at the switch target. */
2152 if (TREE_CODE (cond) == SSA_NAME)
2154 def = SSA_NAME_DEF_STMT (cond);
2155 if (TREE_CODE (def) == MODIFY_EXPR)
2157 def = TREE_OPERAND (def, 1);
2158 if (TREE_CODE (def) == NOP_EXPR)
2160 int need_precision;
2161 bool fail;
2163 def = TREE_OPERAND (def, 0);
2165 #ifdef ENABLE_CHECKING
2166 /* ??? Why was Jeff testing this? We are gimple... */
2167 gcc_assert (is_gimple_val (def));
2168 #endif
2170 to = TREE_TYPE (cond);
2171 ti = TREE_TYPE (def);
2173 /* If we have an extension that preserves value, then we
2174 can copy the source value into the switch. */
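/* For example, zero extending an unsigned char into a signed int
   only needs 9 bits of precision and is accepted, while a cast
   from a signed source to an unsigned switch type is rejected. */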
2176 need_precision = TYPE_PRECISION (ti);
2177 fail = false;
2178 if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
2179 fail = true;
2180 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
2181 need_precision += 1;
2182 if (TYPE_PRECISION (to) < need_precision)
2183 fail = true;
2185 if (!fail)
2187 SWITCH_COND (stmt) = def;
2188 modify_stmt (stmt);
2190 return lookup_avail_expr (stmt, insert);
2196 return 0;
2200 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2201 known value for that SSA_NAME (or NULL if no value is known).
2203 NONZERO_VARS is the set SSA_NAMES known to have a nonzero value,
2204 even if we don't know their precise value.
2206 Propagate values from CONST_AND_COPIES and NONZERO_VARS into the PHI
2207 nodes of the successors of BB. */
2209 static void
2210 cprop_into_successor_phis (basic_block bb,
2211 varray_type const_and_copies,
2212 bitmap nonzero_vars)
2214 edge e;
2216 /* This can get rather expensive if the implementation is naive in
2217 how it finds the phi alternative associated with a particular edge. */
2218 for (e = bb->succ; e; e = e->succ_next)
2220 tree phi;
2221 int phi_num_args;
2222 int hint;
2224 /* If this is an abnormal edge, then we do not want to copy propagate
2225 into the PHI alternative associated with this edge. */
2226 if (e->flags & EDGE_ABNORMAL)
2227 continue;
2229 phi = phi_nodes (e->dest);
2230 if (! phi)
2231 continue;
2233 /* There is no guarantee that for any two PHI nodes in a block that
2234 the phi alternative associated with a particular edge will be
2235 at the same index in the phi alternative array.
2237 However, it is very likely they will be the same. So we keep
2238 track of the index of the alternative where we found the edge in
2239 the previous phi node and check that index first in the next
2240 phi node. If that hint fails, then we actually search all
2241 the entries. */
2242 phi_num_args = PHI_NUM_ARGS (phi);
2243 hint = phi_num_args;
2244 for ( ; phi; phi = PHI_CHAIN (phi))
2246 int i;
2247 tree new;
2248 use_operand_p orig_p;
2249 tree orig;
2251 /* If the hint is valid (!= phi_num_args), see if it points
2252 us to the desired phi alternative. */
2253 if (hint != phi_num_args && PHI_ARG_EDGE (phi, hint) == e)
2255 else
2257 /* The hint was either invalid or did not point to the
2258 correct phi alternative. Search all the alternatives
2259 for the correct one. Update the hint. */
2260 for (i = 0; i < phi_num_args; i++)
2261 if (PHI_ARG_EDGE (phi, i) == e)
2262 break;
2263 hint = i;
2266 /* If we did not find the proper alternative, then something is
2267 horribly wrong. */
2268 gcc_assert (hint != phi_num_args);
2270 /* The alternative may be associated with a constant, so verify
2271 it is an SSA_NAME before doing anything with it. */
2272 orig_p = PHI_ARG_DEF_PTR (phi, hint);
2273 orig = USE_FROM_PTR (orig_p);
2274 if (TREE_CODE (orig) != SSA_NAME)
2275 continue;
2277 /* If the alternative is known to have a nonzero value, record
2278 that fact in the PHI node itself for future use. */
2279 if (bitmap_bit_p (nonzero_vars, SSA_NAME_VERSION (orig)))
2280 PHI_ARG_NONZERO (phi, hint) = true;
2282 /* If we have *ORIG_P in our constant/copy table, then replace
2283 ORIG_P with its value in our constant/copy table. */
2284 new = VARRAY_TREE (const_and_copies, SSA_NAME_VERSION (orig));
2285 if (new
2286 && (TREE_CODE (new) == SSA_NAME
2287 || is_gimple_min_invariant (new))
2288 && may_propagate_copy (orig, new))
2290 propagate_value (orig_p, new);
2297 /* Propagate known constants/copies into PHI nodes of BB's successor
2298 blocks. */
2300 static void
2301 cprop_into_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
2302 basic_block bb)
2304 cprop_into_successor_phis (bb, const_and_copies, nonzero_vars);
2307 /* Search for redundant computations in STMT. If any are found, then
2308 replace them with the variable holding the result of the computation.
2310 If safe, record this expression into the available expression hash
2311 table. */
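/* For example, if a dominating block computed a_1 = b_2 + c_3 and
   we now see x_4 = b_2 + c_3, the second RHS is replaced with a_1,
   turning the statement into the copy x_4 = a_1. */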
2313 static bool
2314 eliminate_redundant_computations (struct dom_walk_data *walk_data,
2315 tree stmt, stmt_ann_t ann)
2317 v_may_def_optype v_may_defs = V_MAY_DEF_OPS (ann);
2318 tree *expr_p, def = NULL_TREE;
2319 bool insert = true;
2320 tree cached_lhs;
2321 bool retval = false;
2323 if (TREE_CODE (stmt) == MODIFY_EXPR)
2324 def = TREE_OPERAND (stmt, 0);
2326 /* Certain expressions on the RHS can be optimized away, but can not
2327 themselves be entered into the hash tables. */
2328 if (ann->makes_aliased_stores
2329 || ! def
2330 || TREE_CODE (def) != SSA_NAME
2331 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
2332 || NUM_V_MAY_DEFS (v_may_defs) != 0)
2333 insert = false;
2335 /* Check if the expression has been computed before. */
2336 cached_lhs = lookup_avail_expr (stmt, insert);
2338 /* If this is an assignment and the RHS was not in the hash table,
2339 then try to simplify the RHS and lookup the new RHS in the
2340 hash table. */
2341 if (! cached_lhs && TREE_CODE (stmt) == MODIFY_EXPR)
2342 cached_lhs = simplify_rhs_and_lookup_avail_expr (walk_data, stmt, insert);
2343 /* Similarly if this is a COND_EXPR and we did not find its
2344 expression in the hash table, simplify the condition and
2345 try again. */
2346 else if (! cached_lhs && TREE_CODE (stmt) == COND_EXPR)
2347 cached_lhs = simplify_cond_and_lookup_avail_expr (stmt, ann, insert);
2348 /* Similarly for a SWITCH_EXPR. */
2349 else if (!cached_lhs && TREE_CODE (stmt) == SWITCH_EXPR)
2350 cached_lhs = simplify_switch_and_lookup_avail_expr (stmt, insert);
2352 opt_stats.num_exprs_considered++;
2354 /* Get a pointer to the expression we are trying to optimize. */
2355 if (TREE_CODE (stmt) == COND_EXPR)
2356 expr_p = &COND_EXPR_COND (stmt);
2357 else if (TREE_CODE (stmt) == SWITCH_EXPR)
2358 expr_p = &SWITCH_COND (stmt);
2359 else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
2360 expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
2361 else
2362 expr_p = &TREE_OPERAND (stmt, 1);
2364 /* It is safe to ignore types here since we have already done
2365 type checking in the hashing and equality routines. In fact
2366 type checking here merely gets in the way of constant
2367 propagation. Also, make sure that it is safe to propagate
2368 CACHED_LHS into *EXPR_P. */
2369 if (cached_lhs
2370 && (TREE_CODE (cached_lhs) != SSA_NAME
2371 || may_propagate_copy (*expr_p, cached_lhs)))
2373 if (dump_file && (dump_flags & TDF_DETAILS))
2375 fprintf (dump_file, " Replaced redundant expr '");
2376 print_generic_expr (dump_file, *expr_p, dump_flags);
2377 fprintf (dump_file, "' with '");
2378 print_generic_expr (dump_file, cached_lhs, dump_flags);
2379 fprintf (dump_file, "'\n");
2382 opt_stats.num_re++;
2384 #if defined ENABLE_CHECKING
2385 gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
2386 || is_gimple_min_invariant (cached_lhs));
2387 #endif
2389 if (TREE_CODE (cached_lhs) == ADDR_EXPR
2390 || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
2391 && is_gimple_min_invariant (cached_lhs)))
2392 retval = true;
2394 propagate_tree_value (expr_p, cached_lhs);
2395 modify_stmt (stmt);
2397 return retval;
2400 /* STMT, a MODIFY_EXPR, may create certain equivalences, in either
2401 the available expressions table or the const_and_copies table.
2402 Detect and record those equivalences. */
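/* For example, x_1 = 7 maps x_1 to 7 in CONST_AND_COPIES, while
   x_1 = alloca (n_2) or taking the address of a non-weak decl
   records x_1 as having a nonzero value. */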
2404 static void
2405 record_equivalences_from_stmt (tree stmt,
2406 varray_type *block_nonzero_vars_p,
2407 int may_optimize_p,
2408 stmt_ann_t ann)
2410 tree lhs = TREE_OPERAND (stmt, 0);
2411 enum tree_code lhs_code = TREE_CODE (lhs);
2412 int i;
2414 if (lhs_code == SSA_NAME)
2416 tree rhs = TREE_OPERAND (stmt, 1);
2418 /* Strip away any useless type conversions. */
2419 STRIP_USELESS_TYPE_CONVERSION (rhs);
2421 /* If the RHS of the assignment is a constant or another variable that
2422 may be propagated, register it in the CONST_AND_COPIES table. We
2423 do not need to record unwind data for this, since this is a true
2424 assignment and not an equivalence inferred from a comparison. All
2425 uses of this ssa name are dominated by this assignment, so unwinding
2426 just costs time and space. */
2427 if (may_optimize_p
2428 && (TREE_CODE (rhs) == SSA_NAME
2429 || is_gimple_min_invariant (rhs)))
2430 set_value_for (lhs, rhs, const_and_copies);
2432 /* alloca never returns zero and the address of a non-weak symbol
2433 is never zero. NOP_EXPRs and CONVERT_EXPRs can be completely
2434 stripped as they do not affect this equivalence. */
2435 while (TREE_CODE (rhs) == NOP_EXPR
2436 || TREE_CODE (rhs) == CONVERT_EXPR)
2437 rhs = TREE_OPERAND (rhs, 0);
2439 if (alloca_call_p (rhs)
2440 || (TREE_CODE (rhs) == ADDR_EXPR
2441 && DECL_P (TREE_OPERAND (rhs, 0))
2442 && ! DECL_WEAK (TREE_OPERAND (rhs, 0))))
2443 record_var_is_nonzero (lhs, block_nonzero_vars_p);
2445 /* IOR of any value with a nonzero value will result in a nonzero
2446 value. Even if we do not know the exact result, recording that
2447 the result is nonzero is worth the effort. */
2448 if (TREE_CODE (rhs) == BIT_IOR_EXPR
2449 && integer_nonzerop (TREE_OPERAND (rhs, 1)))
2450 record_var_is_nonzero (lhs, block_nonzero_vars_p);
2453 /* Look at both sides for pointer dereferences. If we find one, then
2454 the pointer must be nonnull and we can enter that equivalence into
2455 the hash tables. */
2456 if (flag_delete_null_pointer_checks)
2457 for (i = 0; i < 2; i++)
2459 tree t = TREE_OPERAND (stmt, i);
2461 /* Strip away any COMPONENT_REFs. */
2462 while (TREE_CODE (t) == COMPONENT_REF)
2463 t = TREE_OPERAND (t, 0);
2465 /* Now see if this is a pointer dereference. */
2466 if (TREE_CODE (t) == INDIRECT_REF)
2468 tree op = TREE_OPERAND (t, 0);
2470 /* If the pointer is a SSA variable, then enter new
2471 equivalences into the hash table. */
2472 while (TREE_CODE (op) == SSA_NAME)
2474 tree def = SSA_NAME_DEF_STMT (op);
2476 record_var_is_nonzero (op, block_nonzero_vars_p);
2478 /* And walk up the USE-DEF chains noting other SSA_NAMEs
2479 which are known to have a nonzero value. */
2480 if (def
2481 && TREE_CODE (def) == MODIFY_EXPR
2482 && TREE_CODE (TREE_OPERAND (def, 1)) == NOP_EXPR)
2483 op = TREE_OPERAND (TREE_OPERAND (def, 1), 0);
2484 else
2485 break;
2490 /* A memory store, even an aliased store, creates a useful
2491 equivalence. By exchanging the LHS and RHS, creating suitable
2492 vops and recording the result in the available expression table,
2493 we may be able to expose more redundant loads. */
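/* For example, after the store *p_1 = x_2 we enter the artificial
   load x_2 = *p_1 into the table, so a later load y_3 = *p_1 with
   the same virtual operands has its RHS replaced by x_2. */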
2494 if (!ann->has_volatile_ops
2495 && (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
2496 || is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
2497 && !is_gimple_reg (lhs))
2499 tree rhs = TREE_OPERAND (stmt, 1);
2500 tree new;
2502 /* FIXME: If the LHS of the assignment is a bitfield and the RHS
2503 is a constant, we need to adjust the constant to fit into the
2504 type of the LHS. If the LHS is a bitfield and the RHS is not
2505 a constant, then we can not record any equivalences for this
2506 statement since we would need to represent the widening or
2507 narrowing of RHS. This fixes gcc.c-torture/execute/921016-1.c
2508 and should not be necessary if GCC represented bitfields
2509 properly. */
2510 if (lhs_code == COMPONENT_REF
2511 && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
2513 if (TREE_CONSTANT (rhs))
2514 rhs = widen_bitfield (rhs, TREE_OPERAND (lhs, 1), lhs);
2515 else
2516 rhs = NULL;
2518 /* If the value overflowed, then we can not use this equivalence. */
2519 if (rhs && ! is_gimple_min_invariant (rhs))
2520 rhs = NULL;
2523 if (rhs)
2525 /* Build a new statement with the RHS and LHS exchanged. */
2526 new = build (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);
2528 create_ssa_artficial_load_stmt (&(ann->operands), new);
2530 /* Finally enter the statement into the available expression
2531 table. */
2532 lookup_avail_expr (new, true);
2537 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
2538 CONST_AND_COPIES. */
2540 static bool
2541 cprop_operand (tree stmt, use_operand_p op_p, varray_type const_and_copies)
2543 bool may_have_exposed_new_symbols = false;
2544 tree val;
2545 tree op = USE_FROM_PTR (op_p);
2547 /* If the operand has a known constant value or it is known to be a
2548 copy of some other variable, use the value or copy stored in
2549 CONST_AND_COPIES. */
2550 val = VARRAY_TREE (const_and_copies, SSA_NAME_VERSION (op));
2551 if (val)
2553 tree op_type, val_type;
2555 /* Do not change the base variable in the virtual operand
2556 tables. That would make it impossible to reconstruct
2557 the renamed virtual operand if we later modify this
2558 statement. Also only allow the new value to be an SSA_NAME
2559 for propagation into virtual operands. */
2560 if (!is_gimple_reg (op)
2561 && (get_virtual_var (val) != get_virtual_var (op)
2562 || TREE_CODE (val) != SSA_NAME))
2563 return false;
2565 /* Get the toplevel type of each operand. */
2566 op_type = TREE_TYPE (op);
2567 val_type = TREE_TYPE (val);
2569 /* While both types are pointers, get the type of the object
2570 pointed to. */
2571 while (POINTER_TYPE_P (op_type) && POINTER_TYPE_P (val_type))
2573 op_type = TREE_TYPE (op_type);
2574 val_type = TREE_TYPE (val_type);
2577 /* Make sure underlying types match before propagating a constant by
2578 converting the constant to the proper type. Note that convert may
2579 return a non-gimple expression, in which case we ignore this
2580 propagation opportunity. */
2581 if (TREE_CODE (val) != SSA_NAME)
2583 if (!lang_hooks.types_compatible_p (op_type, val_type))
2585 val = fold_convert (TREE_TYPE (op), val);
2586 if (!is_gimple_min_invariant (val))
2587 return false;
2591 /* Certain operands are not allowed to be copy propagated due
2592 to their interaction with exception handling and some GCC
2593 extensions. */
2594 else if (!may_propagate_copy (op, val))
2595 return false;
2597 /* Dump details. */
2598 if (dump_file && (dump_flags & TDF_DETAILS))
2600 fprintf (dump_file, " Replaced '");
2601 print_generic_expr (dump_file, op, dump_flags);
2602 fprintf (dump_file, "' with %s '",
2603 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2604 print_generic_expr (dump_file, val, dump_flags);
2605 fprintf (dump_file, "'\n");
2608 /* If VAL is an ADDR_EXPR or a constant of pointer type, note
2609 that we may have exposed a new symbol for SSA renaming. */
2610 if (TREE_CODE (val) == ADDR_EXPR
2611 || (POINTER_TYPE_P (TREE_TYPE (op))
2612 && is_gimple_min_invariant (val)))
2613 may_have_exposed_new_symbols = true;
2615 propagate_value (op_p, val);
2617 /* And note that we modified this statement. This is now
2618 safe, even if we changed virtual operands since we will
2619 rescan the statement and rewrite its operands again. */
2620 modify_stmt (stmt);
2622 return may_have_exposed_new_symbols;
2625 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2626 known value for that SSA_NAME (or NULL if no value is known).
2628 Propagate values from CONST_AND_COPIES into the uses, vuses and
2629 v_may_def_ops of STMT. */
2631 static bool
2632 cprop_into_stmt (tree stmt, varray_type const_and_copies)
2634 bool may_have_exposed_new_symbols = false;
2635 use_operand_p op_p;
2636 ssa_op_iter iter;
2637 tree rhs;
2639 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2641 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2642 may_have_exposed_new_symbols
2643 |= cprop_operand (stmt, op_p, const_and_copies);
2646 if (may_have_exposed_new_symbols)
2648 rhs = get_rhs (stmt);
2649 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2650 recompute_tree_invarant_for_addr_expr (rhs);
2653 return may_have_exposed_new_symbols;
2657 /* Optimize the statement pointed to by iterator SI.
2659 We try to perform some simplistic global redundancy elimination and
2660 constant propagation:
2662 1- To detect global redundancy, we keep track of expressions that have
2663 been computed in this block and its dominators. If we find that the
2664 same expression is computed more than once, we eliminate repeated
2665 computations by using the target of the first one.
2667 2- Constant values and copy assignments. This is used to do very
2668 simplistic constant and copy propagation. When a constant or copy
2669 assignment is found, we map the value on the RHS of the assignment to
2670 the variable in the LHS in the CONST_AND_COPIES table. */
2672 static void
2673 optimize_stmt (struct dom_walk_data *walk_data, basic_block bb,
2674 block_stmt_iterator si)
2676 stmt_ann_t ann;
2677 tree stmt;
2678 bool may_optimize_p;
2679 bool may_have_exposed_new_symbols = false;
2680 struct dom_walk_block_data *bd
2681 = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
2683 stmt = bsi_stmt (si);
2685 get_stmt_operands (stmt);
2686 ann = stmt_ann (stmt);
2687 opt_stats.num_stmts++;
2688 may_have_exposed_new_symbols = false;
2690 if (dump_file && (dump_flags & TDF_DETAILS))
2692 fprintf (dump_file, "Optimizing statement ");
2693 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2696 /* Const/copy propagate into USES, VUSES and the RHS of V_MAY_DEFs. */
2697 may_have_exposed_new_symbols = cprop_into_stmt (stmt, const_and_copies);
2699 /* If the statement has been modified with constant replacements,
2700 fold its RHS before checking for redundant computations. */
2701 if (ann->modified)
2703 /* Try to fold the statement making sure that STMT is kept
2704 up to date. */
2705 if (fold_stmt (bsi_stmt_ptr (si)))
2707 stmt = bsi_stmt (si);
2708 ann = stmt_ann (stmt);
2710 if (dump_file && (dump_flags & TDF_DETAILS))
2712 fprintf (dump_file, " Folded to: ");
2713 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2717 /* Constant/copy propagation above may change the set of
2718 virtual operands associated with this statement. Folding
2719 may remove the need for some virtual operands.
2721 Indicate we will need to rescan and rewrite the statement. */
2722 may_have_exposed_new_symbols = true;
2725 /* Check for redundant computations. Do this optimization only
2726 for assignments that have no volatile ops, and for conditionals and switches. */
2727 may_optimize_p = (!ann->has_volatile_ops
2728 && ((TREE_CODE (stmt) == RETURN_EXPR
2729 && TREE_OPERAND (stmt, 0)
2730 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
2731 && ! (TREE_SIDE_EFFECTS
2732 (TREE_OPERAND (TREE_OPERAND (stmt, 0), 1))))
2733 || (TREE_CODE (stmt) == MODIFY_EXPR
2734 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
2735 || TREE_CODE (stmt) == COND_EXPR
2736 || TREE_CODE (stmt) == SWITCH_EXPR));
2738 if (may_optimize_p)
2739 may_have_exposed_new_symbols
2740 |= eliminate_redundant_computations (walk_data, stmt, ann);
2742 /* Record any additional equivalences created by this statement. */
2743 if (TREE_CODE (stmt) == MODIFY_EXPR)
2744 record_equivalences_from_stmt (stmt,
2745 &bd->nonzero_vars,
2746 may_optimize_p,
2747 ann);
2749 register_definitions_for_stmt (stmt);
2751 /* If STMT is a COND_EXPR and it was modified, then we may know
2752 where it goes. If that is the case, then mark the CFG as altered.
2754 This will cause us to later call remove_unreachable_blocks and
2755 cleanup_tree_cfg when it is safe to do so. It is not safe to
2756 clean things up here since removal of edges and such can trigger
2757 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2758 the manager.
2760 That's all fine and good, except that once SSA_NAMEs are released
2761 to the manager, we must not call create_ssa_name until all references
2762 to released SSA_NAMEs have been eliminated.
2764 All references to the deleted SSA_NAMEs can not be eliminated until
2765 we remove unreachable blocks.
2767 We can not remove unreachable blocks until after we have completed
2768 any queued jump threading.
2770 We can not complete any queued jump threads until we have taken
2771 appropriate variables out of SSA form. Taking variables out of
2772 SSA form can call create_ssa_name and thus we lose.
2774 Ultimately I suspect we're going to need to change the interface
2775 into the SSA_NAME manager. */
2777 if (ann->modified)
2779 tree val = NULL;
2781 if (TREE_CODE (stmt) == COND_EXPR)
2782 val = COND_EXPR_COND (stmt);
2783 else if (TREE_CODE (stmt) == SWITCH_EXPR)
2784 val = SWITCH_COND (stmt);
2786 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2787 cfg_altered = true;
2789 /* If we simplified a statement in such a way that it can be shown
2790 not to trap, update the eh information and the cfg to match. */
2791 if (maybe_clean_eh_stmt (stmt))
2793 bitmap_set_bit (need_eh_cleanup, bb->index);
2794 if (dump_file && (dump_flags & TDF_DETAILS))
2795 fprintf (dump_file, " Flagged to clear EH edges.\n");
2799 if (may_have_exposed_new_symbols)
2800 VARRAY_PUSH_TREE (stmts_to_rescan, bsi_stmt (si));
2803 /* Replace the RHS of STMT with NEW_RHS. If the updated statement can be
2804 found in the available expression hash table, then return the LHS from
2805 the hash table entry.
2807 If INSERT is true, then we also update the available expression
2808 hash table to account for the changes made to STMT. */
2810 static tree
2811 update_rhs_and_lookup_avail_expr (tree stmt, tree new_rhs, bool insert)
2813 tree cached_lhs = NULL;
2815 /* Remove the old entry from the hash table. */
2816 if (insert)
2818 struct expr_hash_elt element;
2820 initialize_hash_element (stmt, NULL, &element);
2821 htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
2824 /* Now update the RHS of the assignment. */
2825 TREE_OPERAND (stmt, 1) = new_rhs;
2827 /* Now lookup the updated statement in the hash table. */
2828 cached_lhs = lookup_avail_expr (stmt, insert);
2830 /* We have now called lookup_avail_expr twice with two different
2831 versions of this same statement, once in optimize_stmt, once here.
2833 We know the call in optimize_stmt did not find an existing entry
2834 in the hash table, so a new entry was created. At the same time
2835 this statement was pushed onto the BLOCK_AVAIL_EXPRS varray.
2837 If this call failed to find an existing entry on the hash table,
2838 then the new version of this statement was entered into the
2839 hash table. And this statement was pushed onto BLOCK_AVAIL_EXPR
2840 for the second time. So there are two copies on BLOCK_AVAIL_EXPRs.
2842 If this call succeeded, we still have one copy of this statement
2843 on the BLOCK_AVAIL_EXPRs varray.
2845 For both cases, we need to pop the most recent entry off the
2846 BLOCK_AVAIL_EXPRs varray. For the case where we never found this
2847 statement in the hash tables, that will leave precisely one
2848 copy of this statement on BLOCK_AVAIL_EXPRs. For the case where
2849 we found a copy of this statement in the second hash table lookup
2850 we want _no_ copies of this statement in BLOCK_AVAIL_EXPRs. */
2851 if (insert)
2852 VARRAY_POP (avail_exprs_stack);
2854 /* And make sure we record the fact that we modified this
2855 statement. */
2856 modify_stmt (stmt);
2858 return cached_lhs;
2861 /* Search for an existing instance of STMT in the AVAIL_EXPRS table. If
2862 found, return its LHS. Otherwise insert STMT in the table and return
2863 NULL_TREE.
2865 Also, when an expression is first inserted in the AVAIL_EXPRS table, it
2866 is also pushed onto AVAIL_EXPRS_STACK, so that it
2867 can be removed when we finish processing this block and its children.
2869 NOTE: This function assumes that STMT is a MODIFY_EXPR node that
2870 contains no CALL_EXPR on its RHS and makes no volatile nor
2871 aliased references. */
2873 static tree
2874 lookup_avail_expr (tree stmt, bool insert)
2876 void **slot;
2877 tree lhs;
2878 tree temp;
2879 struct expr_hash_elt *element = xcalloc (sizeof (struct expr_hash_elt), 1);
2881 lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL;
2883 initialize_hash_element (stmt, lhs, element);
2885 /* Don't bother remembering constant assignments and copy operations.
2886 Constants and copy operations are handled by the constant/copy propagator
2887 in optimize_stmt. */
2888 if (TREE_CODE (element->rhs) == SSA_NAME
2889 || is_gimple_min_invariant (element->rhs))
2891 free (element);
2892 return NULL_TREE;
2895 /* If this is an equality test against zero, see if we have recorded a
2896 nonzero value for the variable in question. */
2897 if ((TREE_CODE (element->rhs) == EQ_EXPR
2898 || TREE_CODE (element->rhs) == NE_EXPR)
2899 && TREE_CODE (TREE_OPERAND (element->rhs, 0)) == SSA_NAME
2900 && integer_zerop (TREE_OPERAND (element->rhs, 1)))
2902 int indx = SSA_NAME_VERSION (TREE_OPERAND (element->rhs, 0));
2904 if (bitmap_bit_p (nonzero_vars, indx))
2906 tree t = element->rhs;
2907 free (element);
2909 if (TREE_CODE (t) == EQ_EXPR)
2910 return boolean_false_node;
2911 else
2912 return boolean_true_node;
2916 /* Finally try to find the expression in the main expression hash table. */
2917 slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
2918 (insert ? INSERT : NO_INSERT));
2919 if (slot == NULL)
2921 free (element);
2922 return NULL_TREE;
2925 if (*slot == NULL)
2927 *slot = (void *) element;
2928 VARRAY_PUSH_TREE (avail_exprs_stack, stmt ? stmt : element->rhs);
2929 return NULL_TREE;
2932 /* Extract the LHS of the assignment so that it can be used as the current
2933 definition of another variable. */
2934 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2936 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2937 use the value from the const_and_copies table. */
2938 if (TREE_CODE (lhs) == SSA_NAME)
2940 temp = get_value_for (lhs, const_and_copies);
2941 if (temp)
2942 lhs = temp;
2945 free (element);
2946 return lhs;
2949 /* Given a condition COND, record into HI_P, LO_P and INVERTED_P the
2950 range of values that result in the conditional having a true value.
2952 Return true if we are successful in extracting a range from COND and
2953 false if we are unsuccessful. */
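/* For example, for an int variable X the condition X > 10 yields
   the range [11, INT_MAX] with INVERTED_P clear, while X != 5
   yields the single point [5, 5] with INVERTED_P set. */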
2955 static bool
2956 extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p)
2958 tree op1 = TREE_OPERAND (cond, 1);
2959 tree high, low, type;
2960 int inverted;
2962 /* Experiments have shown that it's rarely, if ever, useful to
2963 record ranges for enumerations. Presumably this is due to
2964 the fact that they're rarely used directly. They are typically
2965 cast into an integer type and used that way. */
2966 if (TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE)
2967 return 0;
2969 type = TREE_TYPE (op1);
2971 switch (TREE_CODE (cond))
2973 case EQ_EXPR:
2974 high = low = op1;
2975 inverted = 0;
2976 break;
2978 case NE_EXPR:
2979 high = low = op1;
2980 inverted = 1;
2981 break;
2983 case GE_EXPR:
2984 low = op1;
2985 high = TYPE_MAX_VALUE (type);
2986 inverted = 0;
2987 break;
2989 case GT_EXPR:
2990 low = int_const_binop (PLUS_EXPR, op1, integer_one_node, 1);
2991 high = TYPE_MAX_VALUE (type);
2992 inverted = 0;
2993 break;
2995 case LE_EXPR:
2996 high = op1;
2997 low = TYPE_MIN_VALUE (type);
2998 inverted = 0;
2999 break;
3001 case LT_EXPR:
3002 high = int_const_binop (MINUS_EXPR, op1, integer_one_node, 1);
3003 low = TYPE_MIN_VALUE (type);
3004 inverted = 0;
3005 break;
3007 default:
3008 return 0;
3011 *hi_p = high;
3012 *lo_p = low;
3013 *inverted_p = inverted;
3014 return 1;
3017 /* Record a range created by COND for basic block BB. */
3019 static void
3020 record_range (tree cond, basic_block bb, varray_type *vrp_variables_p)
3022 /* We explicitly ignore NE_EXPRs. They rarely allow for meaningful
3023 range optimizations and significantly complicate the implementation. */
3024 if (COMPARISON_CLASS_P (cond)
3025 && TREE_CODE (cond) != NE_EXPR
3026 && TREE_CODE (TREE_TYPE (TREE_OPERAND (cond, 1))) == INTEGER_TYPE)
3028 struct vrp_element *element = ggc_alloc (sizeof (struct vrp_element));
3029 int ssa_version = SSA_NAME_VERSION (TREE_OPERAND (cond, 0));
3031 varray_type *vrp_records_p
3032 = (varray_type *)&VARRAY_GENERIC_PTR (vrp_data, ssa_version);
3034 element->low = NULL;
3035 element->high = NULL;
3036 element->cond = cond;
3037 element->bb = bb;
3039 if (*vrp_records_p == NULL)
3041 VARRAY_GENERIC_PTR_INIT (*vrp_records_p, 2, "vrp records");
3042 VARRAY_GENERIC_PTR (vrp_data, ssa_version) = *vrp_records_p;
3045 VARRAY_PUSH_GENERIC_PTR (*vrp_records_p, element);
3046 if (! *vrp_variables_p)
3047 VARRAY_TREE_INIT (*vrp_variables_p, 2, "vrp_variables");
3048 VARRAY_PUSH_TREE (*vrp_variables_p, TREE_OPERAND (cond, 0));
3052 /* Given a conditional statement IF_STMT, return the assignment 'X = Y'
3053 known to be true depending on which arm of IF_STMT is taken.
3055 Not all conditional statements will result in a useful assignment.
3056 The return value has NULL SRC and DST fields in that case.
3058 Also enter into the available expression table statements of
3059 the form:
3061 TRUE ARM           FALSE ARM
3062 1 = cond           1 = cond'
3063 0 = cond'          0 = cond
3065 This allows us to lookup the condition in a dominated block and
3066 get back a constant indicating if the condition is true. */
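/* For example, for if (x_1 == 4) we return the equivalence x_1 = 4
   for the true arm, and on both arms we record (x_1 == 4) and
   (x_1 != 4) in the available expression table with their known
   truth values. */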
3068 static struct eq_expr_value
3069 get_eq_expr_value (tree if_stmt,
3070 int true_arm,
3071 basic_block bb,
3072 varray_type *vrp_variables_p)
3074 tree cond;
3075 struct eq_expr_value retval;
3077 cond = COND_EXPR_COND (if_stmt);
3078 retval.src = NULL;
3079 retval.dst = NULL;
3081 /* If the conditional is a single variable 'X', return 'X = 1' for
3082 the true arm and 'X = 0' on the false arm. */
3083 if (TREE_CODE (cond) == SSA_NAME)
3085 retval.dst = cond;
3086 retval.src = constant_boolean_node (true_arm, TREE_TYPE (cond));
3087 return retval;
3090 /* If we have a comparison expression, then record its result into
3091 the available expression table. */
3092 if (COMPARISON_CLASS_P (cond))
3094 tree op0 = TREE_OPERAND (cond, 0);
3095 tree op1 = TREE_OPERAND (cond, 1);
3097 /* Special case comparing booleans against a constant as we know
3098 the value of OP0 on both arms of the branch. That is, we can record
3099 an equivalence for OP0 rather than COND. */
3100 if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
3101 && TREE_CODE (op0) == SSA_NAME
3102 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
3103 && is_gimple_min_invariant (op1))
3105 if ((TREE_CODE (cond) == EQ_EXPR && true_arm)
3106 || (TREE_CODE (cond) == NE_EXPR && ! true_arm))
3108 retval.src = op1;
3110 else
3112 if (integer_zerop (op1))
3113 retval.src = boolean_true_node;
3114 else
3115 retval.src = boolean_false_node;
3117 retval.dst = op0;
3118 return retval;
3121 if (TREE_CODE (op0) == SSA_NAME
3122 && (is_gimple_min_invariant (op1) || TREE_CODE (op1) == SSA_NAME))
3124 tree inverted = invert_truthvalue (cond);
3126 /* When we find an available expression in the hash table, we replace
3127 the expression with the LHS of the statement in the hash table.
3129 So, we want to build statements such as "1 = <condition>" on the
3130 true arm and "0 = <condition>" on the false arm. That way if we
3131 find the expression in the table, we will replace it with its
3132 known constant value. Also insert inversions of the result and
3133 condition into the hash table. */
3134 if (true_arm)
3136 record_cond (cond, boolean_true_node);
3137 record_dominating_conditions (cond);
3138 record_cond (inverted, boolean_false_node);
3140 if (TREE_CONSTANT (op1))
3141 record_range (cond, bb, vrp_variables_p);
3143 /* If the conditional is of the form 'X == Y', return 'X = Y'
3144 for the true arm. */
3145 if (TREE_CODE (cond) == EQ_EXPR)
3147 retval.dst = op0;
3148 retval.src = op1;
3149 return retval;
3152 else
3155 record_cond (inverted, boolean_true_node);
3156 record_dominating_conditions (inverted);
3157 record_cond (cond, boolean_false_node);
3159 if (TREE_CONSTANT (op1))
3160 record_range (inverted, bb, vrp_variables_p);
3162 /* If the conditional is of the form 'X != Y', return 'X = Y'
3163 for the false arm. */
3164 if (TREE_CODE (cond) == NE_EXPR)
3166 retval.dst = op0;
3167 retval.src = op1;
3168 return retval;
3174 return retval;
3177 /* Hashing and equality functions for AVAIL_EXPRS. The table stores
3178 MODIFY_EXPR statements. We compute a value number for expressions using
3179 the code of the expression and the SSA numbers of its operands. */
3181 static hashval_t
3182 avail_expr_hash (const void *p)
3184 stmt_ann_t ann = ((struct expr_hash_elt *)p)->ann;
3185 tree rhs = ((struct expr_hash_elt *)p)->rhs;
3186 hashval_t val = 0;
3187 size_t i;
3188 vuse_optype vuses;
3190 /* iterative_hash_expr knows how to deal with any expression and
3191 deals with commutative operators as well, so just use it instead
3192 of duplicating such complexities here. */
3193 val = iterative_hash_expr (rhs, val);
3195 /* If the hash table entry is not associated with a statement, then we
3196 can just hash the expression and not worry about virtual operands
3197 and such. */
3198 if (!ann)
3199 return val;
3201 /* Add the SSA version numbers of every vuse operand. This is important
3202 because compound variables like arrays are not renamed in the
3203 operands. Rather, the rename is done on the virtual variable
3204 representing all the elements of the array. */
3205 vuses = VUSE_OPS (ann);
3206 for (i = 0; i < NUM_VUSES (vuses); i++)
3207 val = iterative_hash_expr (VUSE_OP (vuses, i), val);
3209 return val;
3212 static hashval_t
3213 real_avail_expr_hash (const void *p)
3215 return ((const struct expr_hash_elt *)p)->hash;
3218 static int
3219 avail_expr_eq (const void *p1, const void *p2)
3221 stmt_ann_t ann1 = ((struct expr_hash_elt *)p1)->ann;
3222 tree rhs1 = ((struct expr_hash_elt *)p1)->rhs;
3223 stmt_ann_t ann2 = ((struct expr_hash_elt *)p2)->ann;
3224 tree rhs2 = ((struct expr_hash_elt *)p2)->rhs;
3226 /* If they are the same physical expression, return true. */
3227 if (rhs1 == rhs2 && ann1 == ann2)
3228 return true;
3230 /* If their codes are not equal, then quit now. */
3231 if (TREE_CODE (rhs1) != TREE_CODE (rhs2))
3232 return false;
3234 /* In case of a collision, both RHS have to be identical and have the
3235 same VUSE operands. */
3236 if ((TREE_TYPE (rhs1) == TREE_TYPE (rhs2)
3237 || lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2)))
3238 && operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
3240 vuse_optype ops1 = NULL;
3241 vuse_optype ops2 = NULL;
3242 size_t num_ops1 = 0;
3243 size_t num_ops2 = 0;
3244 size_t i;
3246 if (ann1)
3248 ops1 = VUSE_OPS (ann1);
3249 num_ops1 = NUM_VUSES (ops1);
3252 if (ann2)
3254 ops2 = VUSE_OPS (ann2);
3255 num_ops2 = NUM_VUSES (ops2);
3258 /* If the number of virtual uses is different, then we consider
3259 them not equal. */
3260 if (num_ops1 != num_ops2)
3261 return false;
3263 for (i = 0; i < num_ops1; i++)
3264 if (VUSE_OP (ops1, i) != VUSE_OP (ops2, i))
3265 return false;
3267 gcc_assert (((struct expr_hash_elt *)p1)->hash
3268 == ((struct expr_hash_elt *)p2)->hash);
3269 return true;
3272 return false;
3275 /* Given STMT, register all objects set by this statement into
3276 the block local definitions stack BLOCK_DEFS_STACK
3277 and CURRDEFS. */
3279 static void
3280 register_definitions_for_stmt (tree stmt)
3282 tree def;
3283 ssa_op_iter iter;
3285 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
3288 /* FIXME: We shouldn't be registering new defs if the variable
3289 doesn't need to be renamed. */
3290 register_new_def (def, &block_defs_stack);