/* SSA Dominator optimizations for trees
   Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "errors.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "real.h"
#include "tree-pass.h"
#include "langhooks.h"
/* This file implements optimizations on the dominator tree.  */

/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;
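/* For instance, given a fragment such as

     a_3 = b_1 + c_2;
     ...
     d_5 = b_1 + c_2;

   the first assignment records "b_1 + c_2 has the value a_3" in this
   table, and the lookup for the second statement rewrites it into the
   copy d_5 = a_3.  (The SSA names above are made up purely for
   illustration.)  */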
/* Structure for entries in the expression hash table.

   This requires more memory for the hash table entries, but allows us
   to avoid creating silly tree nodes and annotations for conditionals,
   eliminates 2 global hash tables and two block local varrays.

   It also allows us to reduce the number of hash table lookups we
   have to perform in lookup_avail_expr and finally it allows us to
   significantly reduce the number of calls into the hashing routine
   itself.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  tree rhs;

  /* The annotation if this element corresponds to a statement.  */
  stmt_ann_t ann;

  /* The hash value for RHS/ann.  */
  hashval_t hash;
};
/* Table of constant values and copies indexed by SSA name.  When the
   renaming pass finds an assignment of a constant (X_i = C) or a copy
   assignment from another SSA variable (X_i = Y_j), it creates a mapping
   between X_i and the RHS in this table.  This mapping is used later on,
   when renaming uses of X_i.  If an assignment to X_i is found in this
   table, instead of using X_i, we use the RHS of the statement stored in
   this table (thus performing very simplistic copy and constant
   propagation).  */
static varray_type const_and_copies;
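/* For instance, after seeing

     x_1 = 5;
     y_2 = x_1;

   this table maps x_1 to 5 and y_2 to 5, so later uses of either name
   can be replaced by the constant.  (Made-up SSA names, for
   illustration only.)  */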
/* Bitmap of SSA_NAMEs known to have a nonzero value, even if we do not
   know their exact value.  */
static bitmap nonzero_vars;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
};
/* Value range propagation record.  Each time we encounter a conditional
   of the form SSA_NAME COND CONST we create a new vrp_element to record
   how the condition affects the possible values SSA_NAME may have.

   Each record contains the condition tested (COND), and the range of
   values the variable may legitimately have if COND is true.  Note the
   range of values may be a smaller range than COND specifies if we have
   recorded other ranges for this variable.  Each record also contains the
   block in which the range was recorded for invalidation purposes.

   Note that the current known range is computed lazily.  This allows us
   to avoid the overhead of computing ranges which are never queried.

   When we encounter a conditional, we look for records which constrain
   the SSA_NAME used in the condition.  In some cases those records allow
   us to determine the condition's result at compile time.  In other cases
   they may allow us to simplify the condition.

   We also use value ranges to do things like transform signed div/mod
   operations into unsigned div/mod or to simplify ABS_EXPRs.

   Simple experiments have shown these optimizations to not be all that
   useful on switch statements (much to my surprise).  So switch statement
   optimizations are not performed.

   Note carefully that we do not propagate information through each statement
   in the block.  That is, if we know variable X has a value defined of
   [0, 25] and we encounter Y = X + 1, we do not track a value range
   for Y (which would be [1, 26] if we cared).  Similarly we do not
   constrain values as we encounter narrowing typecasts, etc.  */
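/* As a made-up example, after entering the true arm of

     if (x_2 > 10)

   a vrp_element constrains x_2 to values greater than 10, so a later
   test such as "if (x_2 > 5)" dominated by that arm can be decided at
   compile time even though the exact value of x_2 is unknown.  */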
struct vrp_element
{
  /* The highest and lowest values the variable in COND may contain when
     COND is true.  Note this may not necessarily be the same values
     tested by COND if the same variable was used in earlier conditionals.

     Note this is computed lazily and thus can be NULL indicating that
     the values have not been computed yet.  */
  tree low;
  tree high;

  /* The actual conditional we recorded.  This is needed since we compute
     ranges lazily.  */
  tree cond;

  /* The basic block where this record was created.  We use this to determine
     when to remove records.  */
  basic_block bb;
};

static struct opt_stats_d opt_stats;

/* A virtual array holding value range records for the variable identified
   by the index, SSA_VERSION.  */
static varray_type vrp_data;
/* Data structure for block local data used during the dominator walk.
   We maintain a stack of these as we recursively walk down the
   dominator tree.  */

struct dom_walk_block_data
{
  /* Array of all the expressions entered into the global expression
     hash table by this block.  During finalization we use this array to
     know what expressions to remove from the global expression hash
     table.  */
  varray_type avail_exprs;

  /* Array of dest, src pairs that need to be restored into the global
     const/copies table during finalization.  */
  varray_type const_and_copies;

  /* Similarly for the nonzero state of variables that needs to be
     restored during finalization.  */
  varray_type nonzero_vars;

  /* Array of statements we need to rescan during finalization for newly
     exposed variables.  */
  varray_type stmts_to_rescan;

  /* Array of variables which have their values constrained by operations
     in this basic block.  We use this during finalization to know
     which variables need their VRP data updated.  */
  varray_type vrp_variables;

  /* Array of tree pairs used to restore the global currdefs to its
     original state after completing optimization of a block and its
     dominator children.  */
  varray_type block_defs;
};

struct eq_expr_value
{
  tree src;
  tree dst;
};
/* Local functions.  */
static void optimize_stmt (struct dom_walk_data *,
			   basic_block bb,
			   block_stmt_iterator);
static inline tree get_value_for (tree, varray_type table);
static inline void set_value_for (tree, tree, varray_type table);
static tree lookup_avail_expr (tree, varray_type *, bool);
static struct eq_expr_value get_eq_expr_value (tree, int, varray_type *,
					       basic_block, varray_type *);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (tree, tree, varray_type *);
static void record_dominating_conditions (tree, varray_type *);
static void record_const_or_copy (tree, tree, varray_type *);
static void record_equality (tree, tree, varray_type *);
static tree update_rhs_and_lookup_avail_expr (tree, tree, varray_type *, bool);
static tree simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *,
						tree, int);
static tree simplify_cond_and_lookup_avail_expr (tree, varray_type *,
						 stmt_ann_t, int);
static tree simplify_switch_and_lookup_avail_expr (tree, varray_type *, int);
static tree find_equivalent_equality_comparison (tree);
static void record_range (tree, basic_block, varray_type *);
static bool extract_range_from_cond (tree, tree *, tree *, int *);
static void record_equivalences_from_phis (struct dom_walk_data *, basic_block);
static void record_equivalences_from_incoming_edge (struct dom_walk_data *,
						    basic_block);
static bool eliminate_redundant_computations (struct dom_walk_data *,
					      tree, stmt_ann_t);
static void record_equivalences_from_stmt (tree, varray_type *, varray_type *,
					   int, stmt_ann_t);
static void thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
static void dom_opt_initialize_block_local_data (struct dom_walk_data *,
						 basic_block, bool);
static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
static void cprop_into_phis (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (varray_type locals,
						 unsigned limit,
						 htab_t table);
static void restore_vars_to_original_value (varray_type locals,
					    unsigned limit,
					    varray_type table);
static void restore_currdefs_to_original_value (varray_type locals,
						unsigned limit);
static void register_definitions_for_stmt (stmt_ann_t, varray_type *);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
/* Local version of fold that doesn't introduce cruft.  */

static tree
local_fold (tree t)
{
  t = fold (t);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (t);

  return t;
}

/* Return the value associated with variable VAR in TABLE.  */

static inline tree
get_value_for (tree var, varray_type table)
{
  return VARRAY_TREE (table, SSA_NAME_VERSION (var));
}

/* Associate VALUE to variable VAR in TABLE.  */

static inline void
set_value_for (tree var, tree value, varray_type table)
{
  VARRAY_TREE (table, SSA_NAME_VERSION (var)) = value;
}
/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static void
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;
  unsigned int i;

  for (i = 0; i < num_referenced_vars; i++)
    var_ann (referenced_var (i))->current_def = NULL;

  /* Mark loop edges so we avoid threading across loop boundaries.
     This may result in transforming a natural loop into an irreducible
     region.  */
  mark_dfs_back_edges ();

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
  VARRAY_TREE_INIT (const_and_copies, num_ssa_names, "const_and_copies");
  nonzero_vars = BITMAP_XMALLOC ();
  VARRAY_GENERIC_PTR_INIT (vrp_data, num_ssa_names, "vrp_data");
  need_eh_cleanup = BITMAP_XMALLOC ();

  /* Set up callbacks for the generic dominator tree walker.  */
  walk_data.walk_stmts_backward = false;
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = dom_opt_initialize_block_local_data;
  walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
  walk_data.before_dom_children_walk_stmts = optimize_stmt;
  walk_data.before_dom_children_after_stmts = cprop_into_phis;
  walk_data.after_dom_children_before_stmts = NULL;
  walk_data.after_dom_children_walk_stmts = NULL;
  walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = sizeof (struct dom_walk_block_data);

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);

  /* If we prove certain blocks are unreachable, then we want to
     repeat the dominator optimization process as PHI nodes may
     have turned into copies which allows better propagation of
     values.  So we repeat until we do not identify any new unreachable
     blocks.  */
  do
    {
      /* Optimize the dominator tree.  */
      cfg_altered = false;

      /* Recursively walk the dominator tree optimizing statements.  */
      walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

      /* If we exposed any new variables, go ahead and put them into
	 SSA form now, before we handle jump threading.  This simplifies
	 interactions between rewriting of _DECL nodes into SSA form
	 and rewriting SSA_NAME nodes into SSA form after block
	 duplication and CFG manipulation.  */
      if (bitmap_first_set_bit (vars_to_rename) >= 0)
	{
	  rewrite_into_ssa (false);
	  bitmap_clear (vars_to_rename);
	}

      /* Thread jumps, creating duplicate blocks as needed.  */
      cfg_altered = thread_through_all_blocks ();

      /* Removal of statements may make some EH edges dead.  Purge
	 such edges from the CFG as needed.  */
      if (bitmap_first_set_bit (need_eh_cleanup) >= 0)
	{
	  cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
	  bitmap_zero (need_eh_cleanup);
	}

      free_dominance_info (CDI_DOMINATORS);
      cfg_altered = cleanup_tree_cfg ();
      calculate_dominance_info (CDI_DOMINATORS);

      rewrite_ssa_into_ssa ();

      if (VARRAY_ACTIVE_SIZE (const_and_copies) <= num_ssa_names)
	{
	  VARRAY_GROW (const_and_copies, num_ssa_names);
	  VARRAY_GROW (vrp_data, num_ssa_names);
	}

      /* Reinitialize the various tables.  */
      bitmap_clear (nonzero_vars);
      htab_empty (avail_exprs);
      VARRAY_CLEAR (const_and_copies);
      VARRAY_CLEAR (vrp_data);

      for (i = 0; i < num_referenced_vars; i++)
	var_ann (referenced_var (i))->current_def = NULL;
    }
  while (cfg_altered);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  /* We emptied the hash table earlier, now delete it completely.  */
  htab_delete (avail_exprs);

  /* It is not necessary to clear CURRDEFS, REDIRECTION_EDGES, VRP_DATA,
     CONST_AND_COPIES, and NONZERO_VARS as they all get cleared at the bottom
     of the do-while loop above.  */

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free nonzero_vars.  */
  BITMAP_XFREE (nonzero_vars);
  BITMAP_XFREE (need_eh_cleanup);
}
static bool
gate_dominator (void)
{
  return flag_tree_dom != 0;
}

struct tree_opt_pass pass_dominator = 
{
  "dom",				/* name */
  gate_dominator,			/* gate */
  tree_ssa_dominator_optimize,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,		/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_verify_ssa			/* todo_flags_finish */
};
/* We are exiting BB, see if the target block begins with a conditional
   jump which has a known value when reached via BB.  */
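/* For example (an illustrative CFG, not from any particular testcase):
   if the edge E goes from BB to a block D that begins with

     if (x_1 == 0)

   and x_1 is known to be zero on E (from a PHI argument or from the
   const/copies table), then E can be redirected straight to the true
   arm of D, bypassing the redundant test.  */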
static void
thread_across_edge (struct dom_walk_data *walk_data, edge e)
{
  struct dom_walk_block_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
  block_stmt_iterator bsi;
  tree stmt = NULL;
  tree phi;

  /* Each PHI creates a temporary equivalence, record them.  */
  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
    {
      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
      tree dst = PHI_RESULT (phi);
      record_const_or_copy (dst, src, &bd->const_and_copies);
      register_new_def (dst, &bd->block_defs);
    }

  for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree lhs, cached_lhs;

      stmt = bsi_stmt (bsi);

      /* Ignore empty statements and labels.  */
      if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
	continue;

      /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new
	 value, then stop our search here.  Ideally when we stop a
	 search we stop on a COND_EXPR or SWITCH_EXPR.  */
      if (TREE_CODE (stmt) != MODIFY_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
	break;

      /* At this point we have a statement which assigns an RHS to an
	 SSA_VAR on the LHS.  We want to prove that the RHS is already
	 available and that its value is held in the current definition
	 of the LHS -- meaning that this assignment is a NOP when
	 reached via edge E.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
	cached_lhs = TREE_OPERAND (stmt, 1);
      else
	cached_lhs = lookup_avail_expr (stmt, NULL, false);

      lhs = TREE_OPERAND (stmt, 0);

      /* This can happen if we thread around to the start of a loop.  */
      if (lhs == cached_lhs)
	break;

      /* If we did not find RHS in the hash table, then try again after
	 temporarily const/copy propagating the operands.  */
      if (!cached_lhs)
	{
	  /* Copy the operands.  */
	  stmt_ann_t ann = stmt_ann (stmt);
	  use_optype uses = USE_OPS (ann);
	  vuse_optype vuses = VUSE_OPS (ann);
	  tree *uses_copy = xcalloc (NUM_USES (uses), sizeof (tree));
	  tree *vuses_copy = xcalloc (NUM_VUSES (vuses), sizeof (tree));
	  unsigned int i;

	  /* Make a copy of the uses into USES_COPY, then cprop into
	     the use operands.  */
	  for (i = 0; i < NUM_USES (uses); i++)
	    {
	      tree tmp = NULL;

	      uses_copy[i] = USE_OP (uses, i);
	      if (TREE_CODE (USE_OP (uses, i)) == SSA_NAME)
		tmp = get_value_for (USE_OP (uses, i), const_and_copies);
	      if (tmp)
		SET_USE_OP (uses, i, tmp);
	    }

	  /* Similarly for virtual uses.  */
	  for (i = 0; i < NUM_VUSES (vuses); i++)
	    {
	      tree tmp = NULL;

	      vuses_copy[i] = VUSE_OP (vuses, i);
	      if (TREE_CODE (VUSE_OP (vuses, i)) == SSA_NAME)
		tmp = get_value_for (VUSE_OP (vuses, i), const_and_copies);
	      if (tmp)
		SET_VUSE_OP (vuses, i, tmp);
	    }

	  /* Try to lookup the new expression.  */
	  cached_lhs = lookup_avail_expr (stmt, NULL, false);

	  /* Restore the statement's original uses/defs.  */
	  for (i = 0; i < NUM_USES (uses); i++)
	    SET_USE_OP (uses, i, uses_copy[i]);

	  for (i = 0; i < NUM_VUSES (vuses); i++)
	    SET_VUSE_OP (vuses, i, vuses_copy[i]);

	  free (uses_copy);
	  free (vuses_copy);

	  /* If we still did not find the expression in the hash table,
	     then we can not ignore this statement.  */
	  if (! cached_lhs)
	    break;
	}

      /* If the expression in the hash table was not assigned to an
	 SSA_NAME, then we can not ignore this statement.  */
      if (TREE_CODE (cached_lhs) != SSA_NAME)
	break;

      /* If we have different underlying variables, then we can not
	 ignore this statement.  */
      if (SSA_NAME_VAR (cached_lhs) != SSA_NAME_VAR (lhs))
	break;

      /* If CACHED_LHS does not represent the current value of the underlying
	 variable in CACHED_LHS/LHS, then we can not ignore this statement.  */
      if (var_ann (SSA_NAME_VAR (lhs))->current_def != cached_lhs)
	break;

      /* If we got here, then we can ignore this statement and continue
	 walking through the statements in the block looking for a threadable
	 COND_EXPR.

	 We want to record an equivalence lhs = cached_lhs so that if
	 the result of this statement is used later we can copy propagate
	 suitably.  */
      record_const_or_copy (lhs, cached_lhs, &bd->const_and_copies);
      register_new_def (lhs, &bd->block_defs);
    }

  /* If we stopped at a COND_EXPR or SWITCH_EXPR, then see if we know which
     arm will be taken.  */
  if (stmt
      && (TREE_CODE (stmt) == COND_EXPR
	  || TREE_CODE (stmt) == SWITCH_EXPR))
    {
      tree cond, cached_lhs;
      edge e1;

      /* Do not forward entry edges into the loop.  If the loop has
	 multiple entry edges we may end up constructing an irreducible
	 region.
	 ??? We may consider forwarding the edges in the case all incoming
	 edges forward to the same destination block.  */
      if (!(e->flags & EDGE_DFS_BACK))
	{
	  for (e1 = e->dest->pred; e1; e1 = e1->pred_next)
	    if (e1->flags & EDGE_DFS_BACK)
	      break;
	  if (e1)
	    return;
	}

      /* Now temporarily cprop the operands and try to find the resulting
	 expression in the hash tables.  */
      if (TREE_CODE (stmt) == COND_EXPR)
	cond = COND_EXPR_COND (stmt);
      else
	cond = SWITCH_COND (stmt);

      if (TREE_CODE_CLASS (TREE_CODE (cond)) == '<')
	{
	  tree dummy_cond, op0, op1;
	  enum tree_code cond_code;

	  op0 = TREE_OPERAND (cond, 0);
	  op1 = TREE_OPERAND (cond, 1);
	  cond_code = TREE_CODE (cond);

	  /* Get the current value of both operands.  */
	  if (TREE_CODE (op0) == SSA_NAME)
	    {
	      tree tmp = get_value_for (op0, const_and_copies);
	      if (tmp)
		op0 = tmp;
	    }

	  if (TREE_CODE (op1) == SSA_NAME)
	    {
	      tree tmp = get_value_for (op1, const_and_copies);
	      if (tmp)
		op1 = tmp;
	    }

	  /* Stuff the operator and operands into our dummy conditional
	     expression, creating the dummy conditional if necessary.  */
	  dummy_cond = walk_data->global_data;
	  if (! dummy_cond)
	    {
	      dummy_cond = build (cond_code, boolean_type_node, op0, op1);
	      dummy_cond = build (COND_EXPR, void_type_node,
				  dummy_cond, NULL, NULL);
	      walk_data->global_data = dummy_cond;
	    }
	  else
	    {
	      TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), cond_code);
	      TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op0;
	      TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1) = op1;
	    }

	  /* If the conditional folds to an invariant, then we are done,
	     otherwise look it up in the hash tables.  */
	  cached_lhs = local_fold (COND_EXPR_COND (dummy_cond));
	  if (! is_gimple_min_invariant (cached_lhs))
	    cached_lhs = lookup_avail_expr (dummy_cond, NULL, false);
	  if (!cached_lhs || ! is_gimple_min_invariant (cached_lhs))
	    {
	      cached_lhs = simplify_cond_and_lookup_avail_expr (dummy_cond,
								NULL,
								NULL,
								false);
	    }
	}
      /* We can have conditionals which just test the state of a
	 variable rather than use a relational operator.  These are
	 simpler to handle.  */
      else if (TREE_CODE (cond) == SSA_NAME)
	{
	  cached_lhs = cond;
	  cached_lhs = get_value_for (cached_lhs, const_and_copies);
	  if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
	    cached_lhs = 0;
	}
      else
	cached_lhs = lookup_avail_expr (stmt, NULL, false);

      if (cached_lhs)
	{
	  edge taken_edge = find_taken_edge (e->dest, cached_lhs);
	  basic_block dest = (taken_edge ? taken_edge->dest : NULL);

	  if (dest == e->dest)
	    return;

	  /* If we have a known destination for the conditional, then
	     we can perform this optimization, which saves at least one
	     conditional jump each time it applies since we get to
	     bypass the conditional at our original destination.  */
	  if (dest)
	    {
	      e->aux = taken_edge;
	      bb_ann (e->dest)->incoming_edge_threaded = true;
	    }
	}
    }
}
/* Initialize the local stacks.

   AVAIL_EXPRS stores all the expressions made available in this block.

   CONST_AND_COPIES stores var/value pairs to restore at the end of this
   block.

   NONZERO_VARS stores the vars which have a nonzero value made in this
   block.

   STMTS_TO_RESCAN is a list of statements we will rescan for operands.

   VRP_VARIABLES is the list of variables which have had their values
   constrained by an operation in this block.

   These stacks are cleared in the finalization routine run for each
   block.  */
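/* Schematically (for illustration only): entering block B pushes the
   expressions, copies, nonzero bits and currdefs B creates onto these
   stacks; after B's dominator children have been walked,
   dom_opt_finalize_block pops the stacks, removing B's entries from
   the global tables before the walk moves on to a sibling block.  */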
static void
dom_opt_initialize_block_local_data (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
				     basic_block bb ATTRIBUTE_UNUSED,
				     bool recycled ATTRIBUTE_UNUSED)
{
#ifdef ENABLE_CHECKING
  struct dom_walk_block_data *bd
    = (struct dom_walk_block_data *)VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);

  /* We get cleared memory from the allocator, so if the memory is not
     cleared, then we are re-using a previously allocated entry.  In
     that case, we can also re-use the underlying virtual arrays.  Just
     make sure we clear them before using them!  */
  if (recycled)
    {
      if (bd->avail_exprs && VARRAY_ACTIVE_SIZE (bd->avail_exprs) > 0)
	abort ();
      if (bd->const_and_copies && VARRAY_ACTIVE_SIZE (bd->const_and_copies) > 0)
	abort ();
      if (bd->nonzero_vars && VARRAY_ACTIVE_SIZE (bd->nonzero_vars) > 0)
	abort ();
      if (bd->stmts_to_rescan && VARRAY_ACTIVE_SIZE (bd->stmts_to_rescan) > 0)
	abort ();
      if (bd->vrp_variables && VARRAY_ACTIVE_SIZE (bd->vrp_variables) > 0)
	abort ();
      if (bd->block_defs && VARRAY_ACTIVE_SIZE (bd->block_defs) > 0)
	abort ();
    }
#endif
}
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

static void
dom_opt_initialize_block (struct dom_walk_data *walk_data, basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  record_equivalences_from_incoming_edge (walk_data, bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (walk_data, bb);
}
/* Given an expression EXPR (a relational expression or a statement),
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
{
  /* Hash table elements may be based on conditional expressions or statements.

     For the former case, we have no annotation and we want to hash the
     conditional expression.  In the latter case we have an annotation and
     we want to record the expression the statement evaluates.  */
  if (TREE_CODE_CLASS (TREE_CODE (expr)) == '<'
      || TREE_CODE (expr) == TRUTH_NOT_EXPR)
    {
      element->ann = NULL;
      element->rhs = expr;
    }
  else if (TREE_CODE (expr) == COND_EXPR)
    {
      element->ann = stmt_ann (expr);
      element->rhs = COND_EXPR_COND (expr);
    }
  else if (TREE_CODE (expr) == SWITCH_EXPR)
    {
      element->ann = stmt_ann (expr);
      element->rhs = SWITCH_COND (expr);
    }
  else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
    {
      element->ann = stmt_ann (expr);
      element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1);
    }
  else
    {
      element->ann = stmt_ann (expr);
      element->rhs = TREE_OPERAND (expr, 1);
    }

  element->lhs = lhs;
  element->hash = avail_expr_hash (element);
}
/* Remove all the expressions in LOCALS from TABLE, stopping when there are
   LIMIT entries left in LOCALS.  */

static void
remove_local_expressions_from_table (varray_type locals,
				     unsigned limit,
				     htab_t table)
{
  if (! locals)
    return;

  /* Remove all the expressions made available in this block.  */
  while (VARRAY_ACTIVE_SIZE (locals) > limit)
    {
      struct expr_hash_elt element;
      tree expr = VARRAY_TOP_TREE (locals);
      VARRAY_POP (locals);

      initialize_hash_element (expr, NULL, &element);
      htab_remove_elt_with_hash (table, &element, element.hash);
    }
}

/* Use the SSA_NAMES in LOCALS to restore TABLE to its original
   state, stopping when there are LIMIT entries left in LOCALS.  */

static void
restore_nonzero_vars_to_original_value (varray_type locals,
					unsigned limit,
					bitmap table)
{
  if (!locals)
    return;

  while (VARRAY_ACTIVE_SIZE (locals) > limit)
    {
      tree name = VARRAY_TOP_TREE (locals);
      VARRAY_POP (locals);
      bitmap_clear_bit (table, SSA_NAME_VERSION (name));
    }
}

/* Use the source/dest pairs in LOCALS to restore TABLE to its original
   state, stopping when there are LIMIT entries left in LOCALS.  */

static void
restore_vars_to_original_value (varray_type locals,
				unsigned limit,
				varray_type table)
{
  if (! locals)
    return;

  while (VARRAY_ACTIVE_SIZE (locals) > limit)
    {
      tree prev_value, dest;

      prev_value = VARRAY_TOP_TREE (locals);
      VARRAY_POP (locals);
      dest = VARRAY_TOP_TREE (locals);
      VARRAY_POP (locals);

      set_value_for (dest, prev_value, table);
    }
}

/* Similar to restore_vars_to_original_value, except that it restores
   CURRDEFS to its original value.  */
static void
restore_currdefs_to_original_value (varray_type locals, unsigned limit)
{
  if (!locals)
    return;

  /* Restore CURRDEFS to its original state.  */
  while (VARRAY_ACTIVE_SIZE (locals) > limit)
    {
      tree tmp = VARRAY_TOP_TREE (locals);
      tree saved_def, var;

      VARRAY_POP (locals);

      /* If we recorded an SSA_NAME, then make the SSA_NAME the current
	 definition of its underlying variable.  If we recorded anything
	 else, it must have been a _DECL node and its current reaching
	 definition must have been NULL.  */
      if (TREE_CODE (tmp) == SSA_NAME)
	{
	  saved_def = tmp;
	  var = SSA_NAME_VAR (saved_def);
	}
      else
	{
	  saved_def = NULL;
	  var = tmp;
	}

      var_ann (var)->current_def = saved_def;
    }
}
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
{
  struct dom_walk_block_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
  tree last;

  /* If we are at a leaf node in the dominator graph, see if we can thread
     the edge from BB through its successor.

     Do this before we remove entries from our equivalence tables.  */
  if (bb->succ
      && ! bb->succ->succ_next
      && (bb->succ->flags & EDGE_ABNORMAL) == 0
      && (get_immediate_dominator (CDI_DOMINATORS, bb->succ->dest) != bb
	  || phi_nodes (bb->succ->dest)))
    {
      thread_across_edge (walk_data, bb->succ);
    }
  else if ((last = last_stmt (bb))
	   && TREE_CODE (last) == COND_EXPR
	   && (TREE_CODE_CLASS (TREE_CODE (COND_EXPR_COND (last))) == '<'
	       || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
	   && bb->succ
	   && (bb->succ->flags & EDGE_ABNORMAL) == 0
	   && bb->succ->succ_next
	   && (bb->succ->succ_next->flags & EDGE_ABNORMAL) == 0
	   && ! bb->succ->succ_next->succ_next)
    {
      edge true_edge, false_edge;
      tree cond, inverted = NULL;
      enum tree_code cond_code;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      cond = COND_EXPR_COND (last);
      cond_code = TREE_CODE (cond);

      if (TREE_CODE_CLASS (cond_code) == '<')
	inverted = invert_truthvalue (cond);

      /* If the THEN arm is the end of a dominator tree or has PHI nodes,
	 then try to thread through its edge.  */
      if (get_immediate_dominator (CDI_DOMINATORS, true_edge->dest) != bb
	  || phi_nodes (true_edge->dest))
	{
	  unsigned avail_expr_limit;
	  unsigned const_and_copies_limit;
	  unsigned currdefs_limit;

	  avail_expr_limit
	    = bd->avail_exprs ? VARRAY_ACTIVE_SIZE (bd->avail_exprs) : 0;
	  const_and_copies_limit
	    = bd->const_and_copies ? VARRAY_ACTIVE_SIZE (bd->const_and_copies)
				   : 0;
	  currdefs_limit
	    = bd->block_defs ? VARRAY_ACTIVE_SIZE (bd->block_defs) : 0;

	  /* Record any equivalences created by following this edge.  */
	  if (TREE_CODE_CLASS (cond_code) == '<')
	    {
	      record_cond (cond, boolean_true_node, &bd->avail_exprs);
	      record_dominating_conditions (cond, &bd->avail_exprs);
	      record_cond (inverted, boolean_false_node, &bd->avail_exprs);
	    }
	  else if (cond_code == SSA_NAME)
	    record_const_or_copy (cond, boolean_true_node,
				  &bd->const_and_copies);

	  /* Now thread the edge.  */
	  thread_across_edge (walk_data, true_edge);

	  /* And restore the various tables to their state before
	     we threaded this edge.  */
	  remove_local_expressions_from_table (bd->avail_exprs,
					       avail_expr_limit,
					       avail_exprs);
	  restore_vars_to_original_value (bd->const_and_copies,
					  const_and_copies_limit,
					  const_and_copies);
	  restore_currdefs_to_original_value (bd->block_defs, currdefs_limit);
	}

      /* Similarly for the ELSE arm.  */
      if (get_immediate_dominator (CDI_DOMINATORS, false_edge->dest) != bb
	  || phi_nodes (false_edge->dest))
	{
	  /* Record any equivalences created by following this edge.  */
	  if (TREE_CODE_CLASS (cond_code) == '<')
	    {
	      record_cond (cond, boolean_false_node, &bd->avail_exprs);
	      record_cond (inverted, boolean_true_node, &bd->avail_exprs);
	      record_dominating_conditions (inverted, &bd->avail_exprs);
	    }
	  else if (cond_code == SSA_NAME)
	    record_const_or_copy (cond, boolean_false_node,
				  &bd->const_and_copies);

	  thread_across_edge (walk_data, false_edge);

	  /* No need to remove local expressions from our tables
	     or restore vars to their original value as that will
	     be done immediately below.  */
	}
    }

  remove_local_expressions_from_table (bd->avail_exprs, 0, avail_exprs);
  restore_nonzero_vars_to_original_value (bd->nonzero_vars, 0, nonzero_vars);
  restore_vars_to_original_value (bd->const_and_copies, 0, const_and_copies);
  restore_currdefs_to_original_value (bd->block_defs, 0);

  /* Remove VRP records associated with this basic block.  They are no
     longer valid.

     To be efficient, we note which variables have had their values
     constrained in this block.  So walk over each variable in the
     VRP_VARIABLEs array.  */
  while (bd->vrp_variables && VARRAY_ACTIVE_SIZE (bd->vrp_variables) > 0)
    {
      tree var = VARRAY_TOP_TREE (bd->vrp_variables);

      /* Each variable has a stack of value range records.  We want to
	 invalidate those associated with our basic block.  So we walk
	 the array backwards popping off records associated with our
	 block.  Once we hit a record not associated with our block
	 we are done.  */
      varray_type var_vrp_records = VARRAY_GENERIC_PTR (vrp_data,
							SSA_NAME_VERSION (var));

      while (VARRAY_ACTIVE_SIZE (var_vrp_records) > 0)
	{
	  struct vrp_element *element
	    = (struct vrp_element *)VARRAY_TOP_GENERIC_PTR (var_vrp_records);

	  if (element->bb != bb)
	    break;

	  VARRAY_POP (var_vrp_records);
	}

      VARRAY_POP (bd->vrp_variables);
    }

  /* Re-scan operands in all statements that may have had new symbols
     exposed.  */
  while (bd->stmts_to_rescan && VARRAY_ACTIVE_SIZE (bd->stmts_to_rescan) > 0)
    {
      tree stmt = VARRAY_TOP_TREE (bd->stmts_to_rescan);
      VARRAY_POP (bd->stmts_to_rescan);
      mark_new_vars_to_rename (stmt, vars_to_rename);
    }
}
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.

   Additionally, if all the PHI alternatives are known to have a nonzero
   value, then the result of this PHI is known to have a nonzero value,
   even if we do not know its exact value.  */
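/* For instance (made-up names), for

     x_3 = PHI <y_1, y_1, y_1>;

   all alternatives are y_1, so x_3 is equivalent to y_1 and the copy is
   recorded in CONST_AND_COPIES; and if every alternative of a PHI is
   known nonzero, its result is marked nonzero in NONZERO_VARS too.  */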
static void
record_equivalences_from_phis (struct dom_walk_data *walk_data, basic_block bb)
{
  struct dom_walk_block_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree lhs = PHI_RESULT (phi);
      tree rhs = NULL;
      int i;

      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	{
	  tree t = PHI_ARG_DEF (phi, i);

	  if (TREE_CODE (t) == SSA_NAME || is_gimple_min_invariant (t))
	    {
	      /* Ignore alternatives which are the same as our LHS.  */
	      if (operand_equal_p (lhs, t, 0))
		continue;

	      /* If we have not processed an alternative yet, then set
		 RHS to this alternative.  */
	      if (rhs == NULL)
		rhs = t;
	      /* If we have processed an alternative (stored in RHS), then
		 see if it is equal to this one.  If it isn't, then stop
		 the search.  */
	      else if (! operand_equal_p (rhs, t, 0))
		break;
	    }
	  else
	    break;
	}

      /* If we had no interesting alternatives, then all the RHS alternatives
	 must have been the same as LHS.  */
      if (!rhs)
	rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
	 breaking out of the loop, then we have a PHI which may create
	 a useful equivalence.  We do not need to record unwind data for
	 this, since this is a true assignment and not an equivalence
	 inferred from a comparison.  All uses of this ssa name are dominated
	 by this assignment, so unwinding just costs time and space.  */
      if (i == PHI_NUM_ARGS (phi)
	  && may_propagate_copy (lhs, rhs))
	set_value_for (lhs, rhs, const_and_copies);

      /* Now see if we know anything about the nonzero property for the
	 result of this PHI.  */
      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	{
	  if (!PHI_ARG_NONZERO (phi, i))
	    break;
	}

      if (i == PHI_NUM_ARGS (phi))
	bitmap_set_bit (nonzero_vars, SSA_NAME_VERSION (PHI_RESULT (phi)));

      register_new_def (lhs, &bd->block_defs);
    }
}
/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;

  for (e = bb->pred; e; e = e->pred_next)
    {
      /* A loop back edge can be identified by the destination of
	 the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	continue;

      /* If we have already seen a non-loop edge, then we must have
	 multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
	return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
	 it.  */
      retval = e;
    }

  return retval;
}
/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */
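/* For example (illustrative only): if BB is reached only over the true
   edge of

     if (x_1 == 42)

   then x_1 is known to equal 42 throughout BB and that equality is
   entered into the const/copies table; on the false edge the condition
   is instead recorded as false in the available expression table.  */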
static void
record_equivalences_from_incoming_edge (struct dom_walk_data *walk_data,
					basic_block bb)
{
  int edge_flags;
  basic_block parent;
  struct eq_expr_value eq_expr_value;
  tree parent_block_last_stmt = NULL;
  struct dom_walk_block_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);
  if (parent)
    {
      parent_block_last_stmt = last_stmt (parent);
      if (parent_block_last_stmt && !is_ctrl_stmt (parent_block_last_stmt))
	parent_block_last_stmt = NULL;
    }

  eq_expr_value.src = NULL;
  eq_expr_value.dst = NULL;

  /* If we have a single predecessor (ignoring loop backedges), then extract
     EDGE_FLAGS from the single incoming edge.  Otherwise just return as
     there is nothing to do.  */
  if (bb->pred
      && parent_block_last_stmt)
    {
      edge e = single_incoming_edge_ignoring_loop_edges (bb);
      if (e && bb_for_stmt (parent_block_last_stmt) == e->src)
	edge_flags = e->flags;
      else
	return;
    }
  else
    return;

  /* If our parent block ended in a COND_EXPR, add any equivalences
     created by the COND_EXPR to the hash table and initialize
     EQ_EXPR_VALUE appropriately.

     EQ_EXPR_VALUE is an assignment expression created when BB's immediate
     dominator ends in a COND_EXPR statement whose predicate is of the form
     'VAR == VALUE', where VALUE may be another variable or a constant.
     This is used to propagate VALUE on the THEN_CLAUSE of that
     conditional.  This assignment is inserted in CONST_AND_COPIES so that
     the copy and constant propagator can find more propagation
     opportunities.  */
  if (TREE_CODE (parent_block_last_stmt) == COND_EXPR
      && (edge_flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
    eq_expr_value = get_eq_expr_value (parent_block_last_stmt,
				       (edge_flags & EDGE_TRUE_VALUE) != 0,
				       &bd->avail_exprs,
				       bb,
				       &bd->vrp_variables);
  /* Similarly when the parent block ended in a SWITCH_EXPR.
     We can only know the value of the switch's condition if the dominator
     parent is also the only predecessor of this block.  */
  else if (bb->pred->src == parent
	   && TREE_CODE (parent_block_last_stmt) == SWITCH_EXPR)
    {
      tree switch_cond = SWITCH_COND (parent_block_last_stmt);

      /* If the switch's condition is an SSA variable, then we may
	 know its value at each of the case labels.  */
      if (TREE_CODE (switch_cond) == SSA_NAME)
	{
	  tree switch_vec = SWITCH_LABELS (parent_block_last_stmt);
	  size_t i, n = TREE_VEC_LENGTH (switch_vec);
	  int case_count = 0;
	  tree match_case = NULL_TREE;

	  /* Search the case labels for those whose destination is
	     the current basic block.  */
	  for (i = 0; i < n; ++i)
	    {
	      tree elt = TREE_VEC_ELT (switch_vec, i);
	      if (label_to_block (CASE_LABEL (elt)) == bb)
		{
		  if (++case_count > 1 || CASE_HIGH (elt))
		    break;
		  match_case = elt;
		}
	    }

	  /* If we encountered precisely one CASE_LABEL_EXPR and it
	     was not the default case, or a case range, then we know
	     the exact value of SWITCH_COND which caused us to get to
	     this block.  Record that equivalence in EQ_EXPR_VALUE.  */
	  if (case_count == 1
	      && match_case
	      && CASE_LOW (match_case)
	      && !CASE_HIGH (match_case))
	    {
	      eq_expr_value.dst = switch_cond;
	      eq_expr_value.src = fold_convert (TREE_TYPE (switch_cond),
						CASE_LOW (match_case));
	    }
	}
    }

  /* If EQ_EXPR_VALUE (VAR == VALUE) is given, register the VALUE as a
     new value for VAR, so that occurrences of VAR can be replaced with
     VALUE while re-writing the THEN arm of a COND_EXPR.  */
  if (eq_expr_value.src && eq_expr_value.dst)
    record_equality (eq_expr_value.dst, eq_expr_value.src,
		     &bd->const_and_copies);
}
/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  long n_exprs;

  fprintf (file, "Total number of statements:                   %6ld\n\n",
	   opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
	   opt_stats.num_exprs_considered);

  n_exprs = opt_stats.num_exprs_considered;
  if (n_exprs == 0)
    n_exprs = 1;

  fprintf (file, "    Redundant expressions eliminated:         %6ld (%.0f%%)\n",
	   opt_stats.num_re, PERCENT (opt_stats.num_re,
				      n_exprs));

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, "    avail_exprs: ");
  htab_statistics (file, avail_exprs);
}

/* Dump SSA statistics on stderr.  */

void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}

/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
	   (long) htab_size (htab),
	   (long) htab_elements (htab),
	   htab_collisions (htab));
}
/* Record the fact that VAR has a nonzero value, though we may not know
   its exact value.  Note that if VAR is already known to have a nonzero
   value, then we do nothing.  */

static void
record_var_is_nonzero (tree var, varray_type *block_nonzero_vars_p)
{
  int indx = SSA_NAME_VERSION (var);

  if (bitmap_bit_p (nonzero_vars, indx))
    return;

  /* Mark it in the global table.  */
  bitmap_set_bit (nonzero_vars, indx);

  /* Record this SSA_NAME so that we can reset the global table
     when we leave this block.  */
  if (! *block_nonzero_vars_p)
    VARRAY_TREE_INIT (*block_nonzero_vars_p, 2, "block_nonzero_vars");
  VARRAY_PUSH_TREE (*block_nonzero_vars_p, var);
}
/* Enter a statement into the true/false expression hash table indicating
   that the condition COND has the value VALUE.  */

static void
record_cond (tree cond, tree value, varray_type *block_avail_exprs_p)
{
  struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));
  void **slot;

  initialize_hash_element (cond, value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
				   element->hash, true);
  if (*slot == NULL)
    {
      *slot = (void *) element;
      if (! *block_avail_exprs_p)
	VARRAY_TREE_INIT (*block_avail_exprs_p, 20, "block_avail_exprs");
      VARRAY_PUSH_TREE (*block_avail_exprs_p, cond);
    }
  else
    free (element);
}
/* COND is a condition which is known to be true.  Record variants of
   COND which must also be true.

   For example, if a < b is true, then a <= b must also be true.  */
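/* More concretely, recording "a_1 < b_2" as true also records
   "a_1 <= b_2", "a_1 != b_2", "ORDERED (a_1, b_2)" and the LTGT form of
   "a_1 <> b_2" as true, so later tests of any of those weaker forms
   hit in the available expression table.  (Illustrative SSA names.)  */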
static void
record_dominating_conditions (tree cond, varray_type *block_avail_exprs_p)
{
  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
      record_cond (build2 (LE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (ORDERED_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (NE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (LTGT_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      break;

    case GT_EXPR:
      record_cond (build2 (GE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (ORDERED_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (NE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (LTGT_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      break;

    case GE_EXPR:
    case LE_EXPR:
      record_cond (build2 (ORDERED_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      break;

    case EQ_EXPR:
      record_cond (build2 (ORDERED_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (LE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (GE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      break;

    case UNORDERED_EXPR:
      record_cond (build2 (NE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (UNLE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (UNGE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (UNEQ_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (UNLT_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (UNGT_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      break;

    case UNLT_EXPR:
      record_cond (build2 (UNLE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (NE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      break;

    case UNGT_EXPR:
      record_cond (build2 (UNGE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (NE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      break;

    case UNEQ_EXPR:
      record_cond (build2 (UNLE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (UNGE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      break;

    case LTGT_EXPR:
      record_cond (build2 (NE_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);
      record_cond (build2 (ORDERED_EXPR, boolean_type_node,
			   TREE_OPERAND (cond, 0),
			   TREE_OPERAND (cond, 1)),
		   boolean_true_node,
		   block_avail_exprs_p);

    default:
      break;
    }
}
/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x,
			varray_type *block_const_and_copies_p)
{
  set_value_for (x, y, const_and_copies);

  if (!*block_const_and_copies_p)
    VARRAY_TREE_INIT (*block_const_and_copies_p, 2, "block_const_and_copies");
  VARRAY_PUSH_TREE (*block_const_and_copies_p, x);
  VARRAY_PUSH_TREE (*block_const_and_copies_p, prev_x);
}

/* Record that X is equal to Y in const_and_copies.  Record undo
   information in the block-local varray.  */

static void
record_const_or_copy (tree x, tree y, varray_type *block_const_and_copies_p)
{
  tree prev_x = get_value_for (x, const_and_copies);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = get_value_for (y, const_and_copies);
      if (tmp)
	y = tmp;
    }

  record_const_or_copy_1 (x, y, prev_x, block_const_and_copies_p);
}
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as an assignment.  */

static void
record_equality (tree x, tree y, varray_type *block_const_and_copies_p)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = get_value_for (x, const_and_copies);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = get_value_for (y, const_and_copies);

  /* If one of the previous values is invariant, then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (TREE_INVARIANT (y))
    ;
  else if (TREE_INVARIANT (x))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && TREE_INVARIANT (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
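  /* For example, after "if (x_1 == 0.0)" we may not record x_1 = 0.0 when
     signed zeros are honored: x_1 could really be -0.0, and replacing it
     with +0.0 would change the sign of results such as 1.0 / x_1.
     (Illustrative SSA name.)  */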
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
	  || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x, block_const_and_copies_p);
}
/* STMT is a MODIFY_EXPR for which we were unable to find RHS in the
   hash tables.  Try to simplify the RHS using whatever equivalences
   we may have recorded.

   If we are able to simplify the RHS, then lookup the simplified form in
   the hash table and return the result.  Otherwise return NULL.  */
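/* For instance (made-up SSA names), given

     x_2 = ~y_1;
     ...
     z_4 = ~x_2;

   the second statement simplifies to z_4 = y_1, which is then looked up
   (and possibly entered) in the available expression table.  */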
static tree
simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *walk_data,
				    tree stmt, int insert)
{
  tree rhs = TREE_OPERAND (stmt, 1);
  enum tree_code rhs_code = TREE_CODE (rhs);
  tree result = NULL;
  struct dom_walk_block_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);

  /* If we have lhs = ~x, look and see if we earlier had x = ~y.
     In that case we can change this statement into lhs = y, which
     can then be copy propagated.

     Similarly for negation.  */
  if ((rhs_code == BIT_NOT_EXPR || rhs_code == NEGATE_EXPR)
      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
    {
      /* Get the definition statement for our RHS.  */
      tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));

      /* See if the RHS_DEF_STMT has the same form as our statement.  */
      if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs_def_stmt, 1)) == rhs_code)
	{
	  tree rhs_def_operand;

	  rhs_def_operand = TREE_OPERAND (TREE_OPERAND (rhs_def_stmt, 1), 0);

	  /* Verify that RHS_DEF_OPERAND is a suitable SSA variable.  */
	  if (TREE_CODE (rhs_def_operand) == SSA_NAME
	      && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
	    result = update_rhs_and_lookup_avail_expr (stmt,
						       rhs_def_operand,
						       &bd->avail_exprs,
						       insert);
	}
    }

  /* If we have z = (x OP C1), see if we earlier had x = y OP C2.
     If OP is associative, create and fold (y OP C2) OP C1 which
     should result in (y OP C3), use that as the RHS for the
     assignment.  Add minus to this, as we handle it specially below.  */
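  /* Illustration (made-up names): with x_2 = y_1 + 3 already seen,
     z_4 = x_2 + 5 is rewritten as z_4 = y_1 + 8 before the hash table
     lookup, provided the operation and types permit reassociation.  */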
1669 if ((associative_tree_code (rhs_code) || rhs_code == MINUS_EXPR)
1670 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
1671 && is_gimple_min_invariant (TREE_OPERAND (rhs, 1)))
1673 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
1675 /* See if the RHS_DEF_STMT has the same form as our statement. */
1676 if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR)
1678 tree rhs_def_rhs = TREE_OPERAND (rhs_def_stmt, 1);
1679 enum tree_code rhs_def_code = TREE_CODE (rhs_def_rhs);
1681 if (rhs_code == rhs_def_code
1682 || (rhs_code == PLUS_EXPR && rhs_def_code == MINUS_EXPR)
1683 || (rhs_code == MINUS_EXPR && rhs_def_code == PLUS_EXPR))
1685 tree def_stmt_op0 = TREE_OPERAND (rhs_def_rhs, 0);
1686 tree def_stmt_op1 = TREE_OPERAND (rhs_def_rhs, 1);
1688 if (TREE_CODE (def_stmt_op0) == SSA_NAME
1689 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_stmt_op0)
1690 && is_gimple_min_invariant (def_stmt_op1))
1692 tree outer_const = TREE_OPERAND (rhs, 1);
1693 tree type = TREE_TYPE (TREE_OPERAND (stmt, 0));
1694 tree t;
1696 /* If we care about correct floating point results, then
1697 don't fold x + c1 - c2. Note that we need to take both
1698 the codes and the signs to figure this out. */
1699 if (FLOAT_TYPE_P (type)
1700 && !flag_unsafe_math_optimizations
1701 && (rhs_def_code == PLUS_EXPR
1702 || rhs_def_code == MINUS_EXPR))
1704 bool neg = false;
1706 neg ^= (rhs_code == MINUS_EXPR);
1707 neg ^= (rhs_def_code == MINUS_EXPR);
1708 neg ^= real_isneg (TREE_REAL_CST_PTR (outer_const));
1709 neg ^= real_isneg (TREE_REAL_CST_PTR (def_stmt_op1));
1711 if (neg)
1712 goto dont_fold_assoc;
1715 /* Ho hum. So fold will only operate on the outermost
1716 thingy that we give it, so we have to build the new
1717 expression in two pieces. This requires that we handle
1718 combinations of plus and minus. */
1719 if (rhs_def_code != rhs_code)
1721 if (rhs_def_code == MINUS_EXPR)
1722 t = build (MINUS_EXPR, type, outer_const, def_stmt_op1);
1723 else
1724 t = build (MINUS_EXPR, type, def_stmt_op1, outer_const);
1725 rhs_code = PLUS_EXPR;
1727 else if (rhs_def_code == MINUS_EXPR)
1728 t = build (PLUS_EXPR, type, def_stmt_op1, outer_const);
1729 else
1730 t = build (rhs_def_code, type, def_stmt_op1, outer_const);
1731 t = local_fold (t);
1732 t = build (rhs_code, type, def_stmt_op0, t);
1733 t = local_fold (t);
1735 /* If the result is a suitable looking gimple expression,
1736 then use it instead of the original for STMT. */
1737 if (TREE_CODE (t) == SSA_NAME
1738 || (TREE_CODE_CLASS (TREE_CODE (t)) == '1'
1739 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
1740 || ((TREE_CODE_CLASS (TREE_CODE (t)) == '2'
1741 || TREE_CODE_CLASS (TREE_CODE (t)) == '<')
1742 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
1743 && is_gimple_val (TREE_OPERAND (t, 1))))
1744 result = update_rhs_and_lookup_avail_expr
1745 (stmt, t, &bd->avail_exprs, insert);
1749 dont_fold_assoc:;
1752 /* Transform TRUNC_DIV_EXPR and TRUNC_MOD_EXPR into RSHIFT_EXPR
1753 and BIT_AND_EXPR respectively if the first operand is greater
1754 than zero and the second operand is an exact power of two. */
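/* For instance, when a_1 is known to be nonnegative (unsigned, or proved
   greater than zero via the recorded conditions below), a_1 / 8 becomes
   a_1 >> 3 and a_1 % 8 becomes a_1 & 7.  */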
1755 if ((rhs_code == TRUNC_DIV_EXPR || rhs_code == TRUNC_MOD_EXPR)
1756 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
1757 && integer_pow2p (TREE_OPERAND (rhs, 1)))
1759 tree val;
1760 tree op = TREE_OPERAND (rhs, 0);
1762 if (TYPE_UNSIGNED (TREE_TYPE (op)))
1764 val = integer_one_node;
1766 else
1768 tree dummy_cond = walk_data->global_data;
1770 if (! dummy_cond)
1772 dummy_cond = build (GT_EXPR, boolean_type_node,
1773 op, integer_zero_node);
1774 dummy_cond = build (COND_EXPR, void_type_node,
1775 dummy_cond, NULL, NULL);
1776 walk_data->global_data = dummy_cond;
1778 else
1780 TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), GT_EXPR);
1781 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
1782 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
1783 = integer_zero_node;
1785 val = simplify_cond_and_lookup_avail_expr (dummy_cond,
1786 &bd->avail_exprs,
1787 NULL, false);
1790 if (val && integer_onep (val))
1792 tree t;
1793 tree op0 = TREE_OPERAND (rhs, 0);
1794 tree op1 = TREE_OPERAND (rhs, 1);
1796 if (rhs_code == TRUNC_DIV_EXPR)
1797 t = build (RSHIFT_EXPR, TREE_TYPE (op0), op0,
1798 build_int_2 (tree_log2 (op1), 0));
1799 else
1800 t = build (BIT_AND_EXPR, TREE_TYPE (op0), op0,
1801 local_fold (build (MINUS_EXPR, TREE_TYPE (op1),
1802 op1, integer_one_node)));
1804 result = update_rhs_and_lookup_avail_expr (stmt, t,
1805 &bd->avail_exprs, insert);
1809 /* Transform ABS (X) into X or -X as appropriate. */
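/* For instance, if the recorded conditions prove x_1 <= 0, ABS (x_1)
   becomes -x_1; if they prove x_1 >= 0 (or the type is unsigned), it
   becomes plain x_1.  */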
1810 if (rhs_code == ABS_EXPR
1811 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0))))
1813 tree val;
1814 tree op = TREE_OPERAND (rhs, 0);
1815 tree type = TREE_TYPE (op);
1817 if (TYPE_UNSIGNED (type))
1819 val = integer_zero_node;
1821 else
1823 tree dummy_cond = walk_data->global_data;
1825 if (! dummy_cond)
1827 dummy_cond = build (LE_EXPR, boolean_type_node,
1828 op, integer_zero_node);
1829 dummy_cond = build (COND_EXPR, void_type_node,
1830 dummy_cond, NULL, NULL);
1831 walk_data->global_data = dummy_cond;
1833 else
1835 TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), LE_EXPR);
1836 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
1837 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
1838 = fold_convert (type, integer_zero_node);
1840 val = simplify_cond_and_lookup_avail_expr (dummy_cond,
1841 &bd->avail_exprs,
1842 NULL, false);
1844 if (!val)
1846 TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), GE_EXPR);
1847 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
1848 TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
1849 = fold_convert (type, integer_zero_node);
1851 val = simplify_cond_and_lookup_avail_expr (dummy_cond,
1852 &bd->avail_exprs,
1853 NULL, false);
1855 if (val)
1857 if (integer_zerop (val))
1858 val = integer_one_node;
1859 else if (integer_onep (val))
1860 val = integer_zero_node;
1865 if (val
1866 && (integer_onep (val) || integer_zerop (val)))
1868 tree t;
1870 if (integer_onep (val))
1871 t = build1 (NEGATE_EXPR, TREE_TYPE (op), op);
1872 else
1873 t = op;
1875 result = update_rhs_and_lookup_avail_expr (stmt, t,
1876 &bd->avail_exprs, insert);
1880 /* Optimize *"foo" into 'f'. This is done here rather than
1881 in fold to avoid problems with stuff like &*"foo". */
1882 if (TREE_CODE (rhs) == INDIRECT_REF || TREE_CODE (rhs) == ARRAY_REF)
1884 tree t = fold_read_from_constant_string (rhs);
1886 if (t)
1887 result = update_rhs_and_lookup_avail_expr (stmt, t,
1888 &bd->avail_exprs, insert);
1891 return result;
1894 /* COND is a condition of the form:
1896 x == const or x != const
1898 Look back to x's defining statement and see if x is defined as
1900 x = (type) y;
1902 If CONST is unchanged when we convert it to the type of Y, then we can build
1903 the equivalent expression:
1906 y == const or y != const
1908 Which may allow further optimizations.
1910 Return the equivalent comparison or NULL if no such equivalent comparison
1911 was found. */
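/* For instance, if x_1 = (int) y_2 with y_2 of type short, the test
   x_1 == 5 can be rewritten as y_2 == 5 because converting 5 to short
   leaves it unchanged; a test against a constant such as 70000 that does
   not survive the conversion is left alone.  */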
1913 static tree
1914 find_equivalent_equality_comparison (tree cond)
1916 tree op0 = TREE_OPERAND (cond, 0);
1917 tree op1 = TREE_OPERAND (cond, 1);
1918 tree def_stmt = SSA_NAME_DEF_STMT (op0);
1920 /* OP0 might have been a parameter, so first make sure it
1921 was defined by a MODIFY_EXPR. */
1922 if (def_stmt && TREE_CODE (def_stmt) == MODIFY_EXPR)
1924 tree def_rhs = TREE_OPERAND (def_stmt, 1);
1926 /* Now make sure the RHS of the MODIFY_EXPR is a typecast. */
1927 if ((TREE_CODE (def_rhs) == NOP_EXPR
1928 || TREE_CODE (def_rhs) == CONVERT_EXPR)
1929 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME)
1931 tree def_rhs_inner = TREE_OPERAND (def_rhs, 0);
1932 tree def_rhs_inner_type = TREE_TYPE (def_rhs_inner);
1933 tree new;
1935 if (TYPE_PRECISION (def_rhs_inner_type)
1936 > TYPE_PRECISION (TREE_TYPE (def_rhs)))
1937 return NULL;
1939 /* What we want to prove is that if we convert OP1 to
1940 the type of the object inside the NOP_EXPR, the
1941 result is still equivalent to OP1.
1943 If that is true, then build and return a new equivalent
1944 condition which uses the source of the typecast and the
1945 new constant (which has only changed its type). */
1946 new = build1 (TREE_CODE (def_rhs), def_rhs_inner_type, op1);
1947 new = local_fold (new);
1948 if (is_gimple_val (new) && tree_int_cst_equal (new, op1))
1949 return build (TREE_CODE (cond), TREE_TYPE (cond),
1950 def_rhs_inner, new);
1953 return NULL;
1956 /* STMT is a COND_EXPR for which we could not trivially determine its
1957 result. This routine attempts to find equivalent forms of the
1958 condition which we may be able to optimize better. It also
1959 uses simple value range propagation to optimize conditionals. */
1961 static tree
1962 simplify_cond_and_lookup_avail_expr (tree stmt,
1963 varray_type *block_avail_exprs_p,
1964 stmt_ann_t ann,
1965 int insert)
1967 tree cond = COND_EXPR_COND (stmt);
1969 if (TREE_CODE_CLASS (TREE_CODE (cond)) == '<')
1971 tree op0 = TREE_OPERAND (cond, 0);
1972 tree op1 = TREE_OPERAND (cond, 1);
1974 if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
1976 int limit;
1977 tree low, high, cond_low, cond_high;
1978 int lowequal, highequal, swapped, no_overlap, subset, cond_inverted;
1979 varray_type vrp_records;
1980 struct vrp_element *element;
1982 /* First see if we have a test of an SSA_NAME against a constant
1983 where the SSA_NAME is defined by an earlier typecast which
1984 is irrelevant when performing tests against the given
1985 constant. */
1986 if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1988 tree new_cond = find_equivalent_equality_comparison (cond);
1990 if (new_cond)
1992 /* Update the statement to use the new equivalent
1993 condition. */
1994 COND_EXPR_COND (stmt) = new_cond;
1996 /* If this is not a real stmt, ann will be NULL and we
1997 avoid processing the operands. */
1998 if (ann)
1999 modify_stmt (stmt);
2001 /* Lookup the condition and return its known value if it
2002 exists. */
2003 new_cond = lookup_avail_expr (stmt, block_avail_exprs_p,
2004 insert);
2005 if (new_cond)
2006 return new_cond;
2008 /* The operands have changed, so update op0 and op1. */
2009 op0 = TREE_OPERAND (cond, 0);
2010 op1 = TREE_OPERAND (cond, 1);
2014 /* Consult the value range records for this variable (if they exist)
2015 to see if we can eliminate or simplify this conditional.
2017 Note that two tests are necessary to determine that no records exist.
2018 First we have to see if the virtual array exists; if it
2019 does, then we have to check its active size.
2021 Also note the vast majority of conditionals are not testing
2022 a variable which has had its range constrained by an earlier
2023 conditional. So this filter avoids a lot of unnecessary work. */
2024 vrp_records = VARRAY_GENERIC_PTR (vrp_data, SSA_NAME_VERSION (op0));
2025 if (vrp_records == NULL)
2026 return NULL;
2028 limit = VARRAY_ACTIVE_SIZE (vrp_records);
2030 /* If we have no value range records for this variable, or we are
2031 unable to extract a range for this condition, then there is
2032 nothing to do. */
2033 if (limit == 0
2034 || ! extract_range_from_cond (cond, &cond_high,
2035 &cond_low, &cond_inverted))
2036 return NULL;
2038 /* We really want to avoid unnecessary computations of range
2039 info. So all ranges are computed lazily; this avoids a
2040 lot of unnecessary work, i.e., we record the conditional,
2041 but do not process how it constrains the variable's
2042 potential values until we know that processing the condition
2043 could be helpful.
2045 However, we do not want to have to walk a potentially long
2046 list of ranges, nor do we want to compute a variable's
2047 range more than once for a given path.
2049 Luckily, each time we encounter a conditional that can not
2050 be otherwise optimized we will end up here and we will
2051 compute the necessary range information for the variable
2052 used in this condition.
2054 Thus you can conclude that there will never be more than one
2055 conditional associated with a variable which has not been
2056 processed. So we never need to merge more than one new
2057 conditional into the current range.
2059 These properties also help us avoid unnecessary work. */
2060 element
2061 = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records, limit - 1);
2063 if (element->high && element->low)
2065 /* The last element has been processed, so there is no range
2066 merging to do, we can simply use the high/low values
2067 recorded in the last element. */
2068 low = element->low;
2069 high = element->high;
2071 else
2073 tree tmp_high, tmp_low;
2074 int dummy;
2076 /* The last element has not been processed. Process it now. */
2077 extract_range_from_cond (element->cond, &tmp_high,
2078 &tmp_low, &dummy);
2080 /* If this is the only element, then no merging is necessary,
2081 the high/low values from extract_range_from_cond are all
2082 we need. */
2083 if (limit == 1)
2085 low = tmp_low;
2086 high = tmp_high;
2088 else
2090 /* Get the high/low value from the previous element. */
2091 struct vrp_element *prev
2092 = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records,
2093 limit - 2);
2094 low = prev->low;
2095 high = prev->high;
2097 /* Merge in this element's range with the range from the
2098 previous element.
2100 The low value for the merged range is the maximum of
2101 the previous low value and the low value of this record.
2103 Similarly the high value for the merged range is the
2104 minimum of the previous high value and the high value of
2105 this record. */
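/* For instance, if the previous element's range is [5, 100] and this
   record's condition yields [11, TYPE_MAX], the merged range is
   [11, 100].  */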
2106 low = (tree_int_cst_compare (low, tmp_low) == 1
2107 ? low : tmp_low);
2108 high = (tree_int_cst_compare (high, tmp_high) == -1
2109 ? high : tmp_high);
2112 /* And record the computed range. */
2113 element->low = low;
2114 element->high = high;
2118 /* After we have constrained this variable's potential values,
2119 we try to determine the result of the given conditional.
2121 To simplify later tests, first determine if the current
2122 low value is the same low value as the conditional.
2123 Similarly for the current high value and the high value
2124 for the conditional. */
2125 lowequal = tree_int_cst_equal (low, cond_low);
2126 highequal = tree_int_cst_equal (high, cond_high);
2128 if (lowequal && highequal)
2129 return (cond_inverted ? boolean_false_node : boolean_true_node);
2131 /* To simplify the overlap/subset tests below we may want
2132 to swap the two ranges so that the larger of the two
2133 ranges occurs "first". */
2134 swapped = 0;
2135 if (tree_int_cst_compare (low, cond_low) == 1
2136 || (lowequal
2137 && tree_int_cst_compare (cond_high, high) == 1))
2139 tree temp;
2141 swapped = 1;
2142 temp = low;
2143 low = cond_low;
2144 cond_low = temp;
2145 temp = high;
2146 high = cond_high;
2147 cond_high = temp;
2150 /* Now determine if there is no overlap in the ranges
2151 or if the second range is a subset of the first range. */
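/* For instance, if the variable's known range is [1, 3] and the condition
   tests for [5, 7], the ranges do not overlap and the condition must be
   false; if the known range is [2, 3] and the condition tests for [1, 10],
   the known range is a subset and the condition must be true (modulo the
   inversion handling below).  */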
2152 no_overlap = tree_int_cst_lt (high, cond_low);
2153 subset = tree_int_cst_compare (cond_high, high) != 1;
2155 /* If there was no overlap in the ranges, then this conditional
2156 always has a false value (unless we had to invert this
2157 conditional, in which case it always has a true value). */
2158 if (no_overlap)
2159 return (cond_inverted ? boolean_true_node : boolean_false_node);
2161 /* If the current range is a subset of the condition's range,
2162 then this conditional always has a true value (unless we
2163 had to invert this conditional, in which case it always
2164 has a false value). */
2165 if (subset && swapped)
2166 return (cond_inverted ? boolean_false_node : boolean_true_node);
2168 /* We were unable to determine the result of the conditional.
2169 However, we may be able to simplify the conditional. First
2170 merge the ranges in the same manner as range merging above. */
2171 low = tree_int_cst_compare (low, cond_low) == 1 ? low : cond_low;
2172 high = tree_int_cst_compare (high, cond_high) == -1 ? high : cond_high;
2174 /* If the range has converged to a single point, then turn this
2175 into an equality comparison. */
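/* For instance, if the variable is known to be in [3, 10] and the
   condition is x_1 <= 3, the merged range collapses to [3, 3] and the
   condition is rewritten as x_1 == 3.  */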
2176 if (TREE_CODE (cond) != EQ_EXPR
2177 && TREE_CODE (cond) != NE_EXPR
2178 && tree_int_cst_equal (low, high))
2180 TREE_SET_CODE (cond, EQ_EXPR);
2181 TREE_OPERAND (cond, 1) = high;
2185 return 0;
2188 /* STMT is a SWITCH_EXPR for which we could not trivially determine its
2189 result. This routine attempts to find equivalent forms of the
2190 condition which we may be able to optimize better. */
2192 static tree
2193 simplify_switch_and_lookup_avail_expr (tree stmt,
2194 varray_type *block_avail_exprs_p,
2195 int insert)
2197 tree cond = SWITCH_COND (stmt);
2198 tree def, to, ti;
2200 /* The optimization that we really care about is removing unnecessary
2201 casts. That will let us do much better in propagating the inferred
2202 constant at the switch target. */
2203 if (TREE_CODE (cond) == SSA_NAME)
2205 def = SSA_NAME_DEF_STMT (cond);
2206 if (TREE_CODE (def) == MODIFY_EXPR)
2208 def = TREE_OPERAND (def, 1);
2209 if (TREE_CODE (def) == NOP_EXPR)
2211 int need_precision;
2212 bool fail;
2214 def = TREE_OPERAND (def, 0);
2216 #ifdef ENABLE_CHECKING
2217 /* ??? Why was Jeff testing this? We are gimple... */
2218 if (!is_gimple_val (def))
2219 abort ();
2220 #endif
2222 to = TREE_TYPE (cond);
2223 ti = TREE_TYPE (def);
2225 /* If we have an extension that preserves value, then we
2226 can copy the source value into the switch. */
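/* For instance, widening an unsigned char to a 32-bit signed int
   preserves every value (9 bits of precision needed, 32 available), so
   the cast can be stripped; a signed char converted to an unsigned type
   fails the test below because negative source values do not survive
   the round trip.  */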
2228 need_precision = TYPE_PRECISION (ti);
2229 fail = false;
2230 if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
2231 fail = true;
2232 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
2233 need_precision += 1;
2234 if (TYPE_PRECISION (to) < need_precision)
2235 fail = true;
2237 if (!fail)
2239 SWITCH_COND (stmt) = def;
2240 modify_stmt (stmt);
2242 return lookup_avail_expr (stmt, block_avail_exprs_p, insert);
2248 return 0;
2252 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2253 known value for that SSA_NAME (or NULL if no value is known).
2255 NONZERO_VARS is the set of SSA_NAMEs known to have a nonzero value,
2256 even if we don't know their precise value.
2258 Propagate values from CONST_AND_COPIES and NONZERO_VARS into the PHI
2259 nodes of the successors of BB. */
2261 static void
2262 cprop_into_successor_phis (basic_block bb,
2263 varray_type const_and_copies,
2264 bitmap nonzero_vars)
2266 edge e;
2268 /* This can get rather expensive if the implementation is naive in
2269 how it finds the phi alternative associated with a particular edge. */
2270 for (e = bb->succ; e; e = e->succ_next)
2272 tree phi;
2273 int phi_num_args;
2274 int hint;
2276 /* If this is an abnormal edge, then we do not want to copy propagate
2277 into the PHI alternative associated with this edge. */
2278 if (e->flags & EDGE_ABNORMAL)
2279 continue;
2281 phi = phi_nodes (e->dest);
2282 if (! phi)
2283 continue;
2285 /* There is no guarantee that, for any two PHI nodes in a block,
2286 the phi alternative associated with a particular edge will be
2287 at the same index in the phi alternative array.
2289 However, it is very likely they will be the same. So we keep
2290 track of the index of the alternative where we found the edge in
2291 the previous phi node and check that index first in the next
2292 phi node. If that hint fails, then we actually search all
2293 the entries. */
2294 phi_num_args = PHI_NUM_ARGS (phi);
2295 hint = phi_num_args;
2296 for ( ; phi; phi = PHI_CHAIN (phi))
2298 int i;
2299 tree new;
2300 use_operand_p orig_p;
2301 tree orig;
2303 /* If the hint is valid (!= phi_num_args), see if it points
2304 us to the desired phi alternative. */
2305 if (hint != phi_num_args && PHI_ARG_EDGE (phi, hint) == e)
2307 else
2309 /* The hint was either invalid or did not point to the
2310 correct phi alternative. Search all the alternatives
2311 for the correct one. Update the hint. */
2312 for (i = 0; i < phi_num_args; i++)
2313 if (PHI_ARG_EDGE (phi, i) == e)
2314 break;
2315 hint = i;
2318 #ifdef ENABLE_CHECKING
2319 /* If we did not find the proper alternative, then something is
2320 horribly wrong. */
2321 if (hint == phi_num_args)
2322 abort ();
2323 #endif
2325 /* The alternative may be associated with a constant, so verify
2326 it is an SSA_NAME before doing anything with it. */
2327 orig_p = PHI_ARG_DEF_PTR (phi, hint);
2328 orig = USE_FROM_PTR (orig_p);
2329 if (TREE_CODE (orig) != SSA_NAME)
2330 continue;
2332 /* If the alternative is known to have a nonzero value, record
2333 that fact in the PHI node itself for future use. */
2334 if (bitmap_bit_p (nonzero_vars, SSA_NAME_VERSION (orig)))
2335 PHI_ARG_NONZERO (phi, hint) = true;
2337 /* If we have *ORIG_P in our constant/copy table, then replace
2338 ORIG_P with its value in our constant/copy table. */
2339 new = VARRAY_TREE (const_and_copies, SSA_NAME_VERSION (orig));
2340 if (new
2341 && (TREE_CODE (new) == SSA_NAME
2342 || is_gimple_min_invariant (new))
2343 && may_propagate_copy (orig, new))
2345 propagate_value (orig_p, new);
2352 /* Propagate known constants/copies into PHI nodes of BB's successor
2353 blocks. */
2355 static void
2356 cprop_into_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
2357 basic_block bb)
2359 cprop_into_successor_phis (bb, const_and_copies, nonzero_vars);
2362 /* Search for redundant computations in STMT. If any are found, then
2363 replace them with the variable holding the result of the computation.
2365 If safe, record this expression into the available expression hash
2366 table. */
2368 static bool
2369 eliminate_redundant_computations (struct dom_walk_data *walk_data,
2370 tree stmt, stmt_ann_t ann)
2372 v_may_def_optype v_may_defs = V_MAY_DEF_OPS (ann);
2373 tree *expr_p, def = NULL_TREE;
2374 bool insert = true;
2375 tree cached_lhs;
2376 bool retval = false;
2377 struct dom_walk_block_data *bd
2378 = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
2380 if (TREE_CODE (stmt) == MODIFY_EXPR)
2381 def = TREE_OPERAND (stmt, 0);
2383 /* Certain expressions on the RHS can be optimized away, but can not
2384 themselves be entered into the hash tables. */
2385 if (ann->makes_aliased_stores
2386 || ! def
2387 || TREE_CODE (def) != SSA_NAME
2388 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
2389 || NUM_V_MAY_DEFS (v_may_defs) != 0)
2390 insert = false;
2392 /* Check if the expression has been computed before. */
2393 cached_lhs = lookup_avail_expr (stmt, &bd->avail_exprs, insert);
2395 /* If this is an assignment and the RHS was not in the hash table,
2396 then try to simplify the RHS and lookup the new RHS in the
2397 hash table. */
2398 if (! cached_lhs && TREE_CODE (stmt) == MODIFY_EXPR)
2399 cached_lhs = simplify_rhs_and_lookup_avail_expr (walk_data,
2400 stmt,
2401 insert);
2402 /* Similarly if this is a COND_EXPR and we did not find its
2403 expression in the hash table, simplify the condition and
2404 try again. */
2405 else if (! cached_lhs && TREE_CODE (stmt) == COND_EXPR)
2406 cached_lhs = simplify_cond_and_lookup_avail_expr (stmt,
2407 &bd->avail_exprs,
2408 ann,
2409 insert);
2410 /* Similarly for a SWITCH_EXPR. */
2411 else if (!cached_lhs && TREE_CODE (stmt) == SWITCH_EXPR)
2412 cached_lhs = simplify_switch_and_lookup_avail_expr (stmt,
2413 &bd->avail_exprs,
2414 insert);
2416 opt_stats.num_exprs_considered++;
2418 /* Get a pointer to the expression we are trying to optimize. */
2419 if (TREE_CODE (stmt) == COND_EXPR)
2420 expr_p = &COND_EXPR_COND (stmt);
2421 else if (TREE_CODE (stmt) == SWITCH_EXPR)
2422 expr_p = &SWITCH_COND (stmt);
2423 else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
2424 expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
2425 else
2426 expr_p = &TREE_OPERAND (stmt, 1);
2428 /* It is safe to ignore types here since we have already done
2429 type checking in the hashing and equality routines. In fact
2430 type checking here merely gets in the way of constant
2431 propagation. Also, make sure that it is safe to propagate
2432 CACHED_LHS into *EXPR_P. */
2433 if (cached_lhs
2434 && (TREE_CODE (cached_lhs) != SSA_NAME
2435 || may_propagate_copy (*expr_p, cached_lhs)))
2437 if (dump_file && (dump_flags & TDF_DETAILS))
2439 fprintf (dump_file, " Replaced redundant expr '");
2440 print_generic_expr (dump_file, *expr_p, dump_flags);
2441 fprintf (dump_file, "' with '");
2442 print_generic_expr (dump_file, cached_lhs, dump_flags);
2443 fprintf (dump_file, "'\n");
2446 opt_stats.num_re++;
2448 #if defined ENABLE_CHECKING
2449 if (TREE_CODE (cached_lhs) != SSA_NAME
2450 && !is_gimple_min_invariant (cached_lhs))
2451 abort ();
2452 #endif
2454 if (TREE_CODE (cached_lhs) == ADDR_EXPR
2455 || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
2456 && is_gimple_min_invariant (cached_lhs)))
2457 retval = true;
2459 propagate_tree_value (expr_p, cached_lhs);
2460 modify_stmt (stmt);
2462 return retval;
2465 /* STMT, a MODIFY_EXPR, may create certain equivalences, in either
2466 the available expressions table or the const_and_copies table.
2467 Detect and record those equivalences. */
2469 static void
2470 record_equivalences_from_stmt (tree stmt,
2471 varray_type *block_avail_exprs_p,
2472 varray_type *block_nonzero_vars_p,
2473 int may_optimize_p,
2474 stmt_ann_t ann)
2476 tree lhs = TREE_OPERAND (stmt, 0);
2477 enum tree_code lhs_code = TREE_CODE (lhs);
2478 int i;
2480 if (lhs_code == SSA_NAME)
2482 tree rhs = TREE_OPERAND (stmt, 1);
2484 /* Strip away any useless type conversions. */
2485 STRIP_USELESS_TYPE_CONVERSION (rhs);
2487 /* If the RHS of the assignment is a constant or another variable that
2488 may be propagated, register it in the CONST_AND_COPIES table. We
2489 do not need to record unwind data for this, since this is a true
2490 assignment and not an equivalence inferred from a comparison. All
2491 uses of this ssa name are dominated by this assignment, so unwinding
2492 just costs time and space. */
2493 if (may_optimize_p
2494 && (TREE_CODE (rhs) == SSA_NAME
2495 || is_gimple_min_invariant (rhs)))
2496 set_value_for (lhs, rhs, const_and_copies);
2498 /* alloca never returns zero and the address of a non-weak symbol
2499 is never zero. NOP_EXPRs and CONVERT_EXPRs can be completely
2500 stripped as they do not affect this equivalence. */
2501 while (TREE_CODE (rhs) == NOP_EXPR
2502 || TREE_CODE (rhs) == CONVERT_EXPR)
2503 rhs = TREE_OPERAND (rhs, 0);
2505 if (alloca_call_p (rhs)
2506 || (TREE_CODE (rhs) == ADDR_EXPR
2507 && DECL_P (TREE_OPERAND (rhs, 0))
2508 && ! DECL_WEAK (TREE_OPERAND (rhs, 0))))
2509 record_var_is_nonzero (lhs, block_nonzero_vars_p);
2511 /* IOR of any value with a nonzero value will result in a nonzero
2512 value. Even if we do not know the exact result recording that
2513 the result is nonzero is worth the effort. */
2514 if (TREE_CODE (rhs) == BIT_IOR_EXPR
2515 && integer_nonzerop (TREE_OPERAND (rhs, 1)))
2516 record_var_is_nonzero (lhs, block_nonzero_vars_p);
2519 /* Look at both sides for pointer dereferences. If we find one, then
2520 the pointer must be nonnull and we can enter that equivalence into
2521 the hash tables. */
2522 if (flag_delete_null_pointer_checks)
2523 for (i = 0; i < 2; i++)
2525 tree t = TREE_OPERAND (stmt, i);
2527 /* Strip away any COMPONENT_REFs. */
2528 while (TREE_CODE (t) == COMPONENT_REF)
2529 t = TREE_OPERAND (t, 0);
2531 /* Now see if this is a pointer dereference. */
2532 if (TREE_CODE (t) == INDIRECT_REF)
2534 tree op = TREE_OPERAND (t, 0);
2536 /* If the pointer is a SSA variable, then enter new
2537 equivalences into the hash table. */
2538 while (TREE_CODE (op) == SSA_NAME)
2540 tree def = SSA_NAME_DEF_STMT (op);
2542 record_var_is_nonzero (op, block_nonzero_vars_p);
2544 /* And walk up the USE-DEF chains noting other SSA_NAMEs
2545 which are known to have a nonzero value. */
2546 if (def
2547 && TREE_CODE (def) == MODIFY_EXPR
2548 && TREE_CODE (TREE_OPERAND (def, 1)) == NOP_EXPR)
2549 op = TREE_OPERAND (TREE_OPERAND (def, 1), 0);
2550 else
2551 break;
2556 /* A memory store, even an aliased store, creates a useful
2557 equivalence. By exchanging the LHS and RHS, creating suitable
2558 vops and recording the result in the available expression table,
2559 we may be able to expose more redundant loads. */
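/* For instance, after the store *p_1 = x_2 we record the artificial
   load x_2 = *p_1; a later load y_3 = *p_1 with the same virtual
   operands then hits that entry and is replaced by y_3 = x_2
   (illustrative SSA names).  */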
2560 if (!ann->has_volatile_ops
2561 && (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
2562 || is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
2563 && !is_gimple_reg (lhs))
2565 tree rhs = TREE_OPERAND (stmt, 1);
2566 tree new;
2568 /* FIXME: If the LHS of the assignment is a bitfield and the RHS
2569 is a constant, we need to adjust the constant to fit into the
2570 type of the LHS. If the LHS is a bitfield and the RHS is not
2571 a constant, then we can not record any equivalences for this
2572 statement since we would need to represent the widening or
2573 narrowing of RHS. This fixes gcc.c-torture/execute/921016-1.c
2574 and should not be necessary if GCC represented bitfields
2575 properly. */
2576 if (lhs_code == COMPONENT_REF
2577 && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
2579 if (TREE_CONSTANT (rhs))
2580 rhs = widen_bitfield (rhs, TREE_OPERAND (lhs, 1), lhs);
2581 else
2582 rhs = NULL;
2584 /* If the value overflowed, then we can not use this equivalence. */
2585 if (rhs && ! is_gimple_min_invariant (rhs))
2586 rhs = NULL;
2589 if (rhs)
2591 /* Build a new statement with the RHS and LHS exchanged. */
2592 new = build (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);
2594 create_ssa_artficial_load_stmt (&(ann->operands), new);
2596 /* Finally enter the statement into the available expression
2597 table. */
2598 lookup_avail_expr (new, block_avail_exprs_p, true);
2603 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
2604 CONST_AND_COPIES. */
2606 static bool
2607 cprop_operand (tree stmt, use_operand_p op_p, varray_type const_and_copies)
2609 bool may_have_exposed_new_symbols = false;
2610 tree val;
2611 tree op = USE_FROM_PTR (op_p);
2613 /* If the operand has a known constant value or it is known to be a
2614 copy of some other variable, use the value or copy stored in
2615 CONST_AND_COPIES. */
2616 val = VARRAY_TREE (const_and_copies, SSA_NAME_VERSION (op));
2617 if (val)
2619 tree op_type, val_type;
2621 /* Do not change the base variable in the virtual operand
2622 tables. That would make it impossible to reconstruct
2623 the renamed virtual operand if we later modify this
2624 statement. Also only allow the new value to be an SSA_NAME
2625 for propagation into virtual operands. */
2626 if (!is_gimple_reg (op)
2627 && (get_virtual_var (val) != get_virtual_var (op)
2628 || TREE_CODE (val) != SSA_NAME))
2629 return false;
2631 /* Get the toplevel type of each operand. */
2632 op_type = TREE_TYPE (op);
2633 val_type = TREE_TYPE (val);
2635 /* While both types are pointers, get the type of the object
2636 pointed to. */
2637 while (POINTER_TYPE_P (op_type) && POINTER_TYPE_P (val_type))
2639 op_type = TREE_TYPE (op_type);
2640 val_type = TREE_TYPE (val_type);
2643 /* Make sure underlying types match before propagating a constant by
2644 converting the constant to the proper type. Note that convert may
2645 return a non-gimple expression, in which case we ignore this
2646 propagation opportunity. */
2647 if (TREE_CODE (val) != SSA_NAME)
2649 if (!lang_hooks.types_compatible_p (op_type, val_type))
2651 val = fold_convert (TREE_TYPE (op), val);
2652 if (!is_gimple_min_invariant (val))
2653 return false;
2657 /* Certain operands are not allowed to be copy propagated due
2658 to their interaction with exception handling and some GCC
2659 extensions. */
2660 else if (!may_propagate_copy (op, val))
2661 return false;
2663 /* Dump details. */
2664 if (dump_file && (dump_flags & TDF_DETAILS))
2666 fprintf (dump_file, " Replaced '");
2667 print_generic_expr (dump_file, op, dump_flags);
2668 fprintf (dump_file, "' with %s '",
2669 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2670 print_generic_expr (dump_file, val, dump_flags);
2671 fprintf (dump_file, "'\n");
2674 /* If VAL is an ADDR_EXPR or a constant of pointer type, note
2675 that we may have exposed a new symbol for SSA renaming. */
2676 if (TREE_CODE (val) == ADDR_EXPR
2677 || (POINTER_TYPE_P (TREE_TYPE (op))
2678 && is_gimple_min_invariant (val)))
2679 may_have_exposed_new_symbols = true;
2681 propagate_value (op_p, val);
2683 /* And note that we modified this statement. This is now
2684 safe, even if we changed virtual operands since we will
2685 rescan the statement and rewrite its operands again. */
2686 modify_stmt (stmt);
2688 return may_have_exposed_new_symbols;
2691 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2692 known value for that SSA_NAME (or NULL if no value is known).
2694 Propagate values from CONST_AND_COPIES into the uses, vuses and
2695 v_may_def_ops of STMT. */
2697 static bool
2698 cprop_into_stmt (tree stmt, varray_type const_and_copies)
2700 bool may_have_exposed_new_symbols = false;
2701 stmt_ann_t ann = stmt_ann (stmt);
2702 size_t i, num_uses, num_vuses, num_v_may_defs;
2703 vuse_optype vuses;
2704 v_may_def_optype v_may_defs;
2705 use_optype uses;
2707 uses = USE_OPS (ann);
2708 num_uses = NUM_USES (uses);
2709 for (i = 0; i < num_uses; i++)
2711 use_operand_p op_p = USE_OP_PTR (uses, i);
2712 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2713 may_have_exposed_new_symbols
2714 |= cprop_operand (stmt, op_p, const_and_copies);
2717 vuses = VUSE_OPS (ann);
2718 num_vuses = NUM_VUSES (vuses);
2719 for (i = 0; i < num_vuses; i++)
2721 use_operand_p op_p = VUSE_OP_PTR (vuses, i);
2722 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2723 may_have_exposed_new_symbols
2724 |= cprop_operand (stmt, op_p, const_and_copies);
2727 v_may_defs = V_MAY_DEF_OPS (ann);
2728 num_v_may_defs = NUM_V_MAY_DEFS (v_may_defs);
2729 for (i = 0; i < num_v_may_defs; i++)
2731 use_operand_p op_p = V_MAY_DEF_OP_PTR (v_may_defs, i);
2732 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2733 may_have_exposed_new_symbols
2734 |= cprop_operand (stmt, op_p, const_and_copies);
2736 return may_have_exposed_new_symbols;
2740 /* Optimize the statement pointed to by iterator SI.
2742 We try to perform some simplistic global redundancy elimination and
2743 constant propagation:
2745 1- To detect global redundancy, we keep track of expressions that have
2746 been computed in this block and its dominators. If we find that the
2747 same expression is computed more than once, we eliminate repeated
2748 computations by using the target of the first one.
2750 2- Constant values and copy assignments. This is used to do very
2751 simplistic constant and copy propagation. When a constant or copy
2752 assignment is found, we map the value on the RHS of the assignment to
2753 the variable in the LHS in the CONST_AND_COPIES table. */
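/* For instance, given a_1 = b_2 + c_3 followed later by d_4 = b_2 + c_3,
   the second computation is replaced by d_4 = a_1.  And given x_5 = 7
   followed by y_6 = x_5 + 1, the use of x_5 is replaced by 7 and the
   statement folds to y_6 = 8 (illustrative SSA names).  */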
2755 static void
2756 optimize_stmt (struct dom_walk_data *walk_data, basic_block bb,
2757 block_stmt_iterator si)
2759 stmt_ann_t ann;
2760 tree stmt;
2761 bool may_optimize_p;
2762 bool may_have_exposed_new_symbols = false;
2763 struct dom_walk_block_data *bd
2764 = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
2766 stmt = bsi_stmt (si);
2768 get_stmt_operands (stmt);
2769 ann = stmt_ann (stmt);
2770 opt_stats.num_stmts++;
2771 may_have_exposed_new_symbols = false;
2773 if (dump_file && (dump_flags & TDF_DETAILS))
2775 fprintf (dump_file, "Optimizing statement ");
2776 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2779 /* Const/copy propagate into USES, VUSES and the RHS of V_MAY_DEFs. */
2780 may_have_exposed_new_symbols = cprop_into_stmt (stmt, const_and_copies);
2782 /* If the statement has been modified with constant replacements,
2783 fold its RHS before checking for redundant computations. */
2784 if (ann->modified)
2786 /* Try to fold the statement making sure that STMT is kept
2787 up to date. */
2788 if (fold_stmt (bsi_stmt_ptr (si)))
2790 stmt = bsi_stmt (si);
2791 ann = stmt_ann (stmt);
2793 if (dump_file && (dump_flags & TDF_DETAILS))
2795 fprintf (dump_file, " Folded to: ");
2796 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2800 /* Constant/copy propagation above may change the set of
2801 virtual operands associated with this statement. Folding
2802 may remove the need for some virtual operands.
2804 Indicate we will need to rescan and rewrite the statement. */
2805 may_have_exposed_new_symbols = true;
2808 /* Check for redundant computations. Do this optimization only
2809 for statements without volatile operands: assignments (possibly wrapped in a RETURN_EXPR) whose RHS has no side effects, and for conditionals and switches.
2810 may_optimize_p = (!ann->has_volatile_ops
2811 && ((TREE_CODE (stmt) == RETURN_EXPR
2812 && TREE_OPERAND (stmt, 0)
2813 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
2814 && ! (TREE_SIDE_EFFECTS
2815 (TREE_OPERAND (TREE_OPERAND (stmt, 0), 1))))
2816 || (TREE_CODE (stmt) == MODIFY_EXPR
2817 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
2818 || TREE_CODE (stmt) == COND_EXPR
2819 || TREE_CODE (stmt) == SWITCH_EXPR));
2821 if (may_optimize_p)
2822 may_have_exposed_new_symbols
2823 |= eliminate_redundant_computations (walk_data, stmt, ann);
2825 /* Record any additional equivalences created by this statement. */
2826 if (TREE_CODE (stmt) == MODIFY_EXPR)
2827 record_equivalences_from_stmt (stmt,
2828 &bd->avail_exprs,
2829 &bd->nonzero_vars,
2830 may_optimize_p,
2831 ann);
2833 register_definitions_for_stmt (ann, &bd->block_defs);
2835 /* If STMT is a COND_EXPR and it was modified, then we may know
2836 where it goes. If that is the case, then mark the CFG as altered.
2838 This will cause us to later call remove_unreachable_blocks and
2839 cleanup_tree_cfg when it is safe to do so. It is not safe to
2840 clean things up here since removal of edges and such can trigger
2841 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2842 the manager.
2844 That's all fine and good, except that once SSA_NAMEs are released
2845 to the manager, we must not call create_ssa_name until all references
2846 to released SSA_NAMEs have been eliminated.
2848 All references to the deleted SSA_NAMEs can not be eliminated until
2849 we remove unreachable blocks.
2851 We can not remove unreachable blocks until after we have completed
2852 any queued jump threading.
2854 We can not complete any queued jump threads until we have taken
2855 appropriate variables out of SSA form. Taking variables out of
2856 SSA form can call create_ssa_name and thus we lose.
2858 Ultimately I suspect we're going to need to change the interface
2859 into the SSA_NAME manager. */
2861 if (ann->modified)
2863 tree val = NULL;
2865 if (TREE_CODE (stmt) == COND_EXPR)
2866 val = COND_EXPR_COND (stmt);
2867 else if (TREE_CODE (stmt) == SWITCH_EXPR)
2868 val = SWITCH_COND (stmt);
2870 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2871 cfg_altered = true;
2873 /* If we simplified a statement in such a way that it can be shown
2874 not to trap, update the EH information and the CFG to match. */
2875 if (maybe_clean_eh_stmt (stmt))
2877 bitmap_set_bit (need_eh_cleanup, bb->index);
2878 if (dump_file && (dump_flags & TDF_DETAILS))
2879 fprintf (dump_file, " Flagged to clear EH edges.\n");
2883 if (may_have_exposed_new_symbols)
2885 if (! bd->stmts_to_rescan)
2886 VARRAY_TREE_INIT (bd->stmts_to_rescan, 20, "stmts_to_rescan");
2887 VARRAY_PUSH_TREE (bd->stmts_to_rescan, bsi_stmt (si));
2891 /* Replace the RHS of STMT with NEW_RHS. If RHS can be found in the
2892 available expression hashtable, then return the LHS from the hash
2893 table.
2895 If INSERT is true, then we also update the available expression
2896 hash table to account for the changes made to STMT. */
2898 static tree
2899 update_rhs_and_lookup_avail_expr (tree stmt, tree new_rhs,
2900 varray_type *block_avail_exprs_p,
2901 bool insert)
2903 tree cached_lhs = NULL;
2905 /* Remove the old entry from the hash table. */
2906 if (insert)
2908 struct expr_hash_elt element;
2910 initialize_hash_element (stmt, NULL, &element);
2911 htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
2914 /* Now update the RHS of the assignment. */
2915 TREE_OPERAND (stmt, 1) = new_rhs;
2917 /* Now lookup the updated statement in the hash table. */
2918 cached_lhs = lookup_avail_expr (stmt, block_avail_exprs_p, insert);
2920 /* We have now called lookup_avail_expr twice with two different
2921 versions of this same statement, once in optimize_stmt, once here.
2923 We know the call in optimize_stmt did not find an existing entry
2924 in the hash table, so a new entry was created. At the same time
2925 this statement was pushed onto the BLOCK_AVAIL_EXPRS varray.
2927 If this call failed to find an existing entry on the hash table,
2928 then the new version of this statement was entered into the
2929 hash table. And this statement was pushed onto BLOCK_AVAIL_EXPR
2930 for the second time. So there are two copies on BLOCK_AVAIL_EXPRs.
2932 If this call succeeded, we still have one copy of this statement
2933 on the BLOCK_AVAIL_EXPRs varray.
2935 For both cases, we need to pop the most recent entry off the
2936 BLOCK_AVAIL_EXPRs varray. For the case where we never found this
2937 statement in the hash tables, that will leave precisely one
2938 copy of this statement on BLOCK_AVAIL_EXPRs. For the case where
2939 we found a copy of this statement in the second hash table lookup
2940 we want _no_ copies of this statement in BLOCK_AVAIL_EXPRs. */
2941 if (insert)
2942 VARRAY_POP (*block_avail_exprs_p);
2944 /* And make sure we record the fact that we modified this
2945 statement. */
2946 modify_stmt (stmt);
2948 return cached_lhs;
2951 /* Search for an existing instance of STMT in the AVAIL_EXPRS table. If
2952 found, return its LHS. Otherwise insert STMT in the table and return
2953 NULL_TREE.
2955 Also, when an expression is first inserted in the AVAIL_EXPRS table, it
2956 is also added to the stack pointed to by BLOCK_AVAIL_EXPRS_P, so that it
2957 can be removed when we finish processing this block and its children.
2959 NOTE: This function assumes that STMT is a MODIFY_EXPR node that
2960 contains no CALL_EXPR on its RHS and makes no volatile nor
2961 aliased references. */
2963 static tree
2964 lookup_avail_expr (tree stmt, varray_type *block_avail_exprs_p, bool insert)
2966 void **slot;
2967 tree lhs;
2968 tree temp;
2969 struct expr_hash_elt *element = xcalloc (sizeof (struct expr_hash_elt), 1);
2971 lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL;
2973 initialize_hash_element (stmt, lhs, element);
2975 /* Don't bother remembering constant assignments and copy operations.
2976 Constants and copy operations are handled by the constant/copy propagator
2977 in optimize_stmt. */
2978 if (TREE_CODE (element->rhs) == SSA_NAME
2979 || is_gimple_min_invariant (element->rhs))
2981 free (element);
2982 return NULL_TREE;
2985 /* If this is an equality test against zero, see if we have recorded a
2986 nonzero value for the variable in question. */
2987 if ((TREE_CODE (element->rhs) == EQ_EXPR
2988 || TREE_CODE (element->rhs) == NE_EXPR)
2989 && TREE_CODE (TREE_OPERAND (element->rhs, 0)) == SSA_NAME
2990 && integer_zerop (TREE_OPERAND (element->rhs, 1)))
2992 int indx = SSA_NAME_VERSION (TREE_OPERAND (element->rhs, 0));
2994 if (bitmap_bit_p (nonzero_vars, indx))
2996 tree t = element->rhs;
2997 free (element);
2999 if (TREE_CODE (t) == EQ_EXPR)
3000 return boolean_false_node;
3001 else
3002 return boolean_true_node;
3006 /* Finally try to find the expression in the main expression hash table. */
3007 slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
3008 (insert ? INSERT : NO_INSERT));
3009 if (slot == NULL)
3011 free (element);
3012 return NULL_TREE;
3015 if (*slot == NULL)
3017 *slot = (void *) element;
3018 if (! *block_avail_exprs_p)
3019 VARRAY_TREE_INIT (*block_avail_exprs_p, 20, "block_avail_exprs");
3020 VARRAY_PUSH_TREE (*block_avail_exprs_p, stmt ? stmt : element->rhs);
3021 return NULL_TREE;
3024 /* Extract the LHS of the assignment so that it can be used as the current
3025 definition of another variable. */
3026 lhs = ((struct expr_hash_elt *)*slot)->lhs;
3028 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
3029 use the value from the const_and_copies table. */
3030 if (TREE_CODE (lhs) == SSA_NAME)
3032 temp = get_value_for (lhs, const_and_copies);
3033 if (temp)
3034 lhs = temp;
3037 free (element);
3038 return lhs;
3041 /* Given a condition COND, record into HI_P, LO_P and INVERTED_P the
3042 range of values that result in the conditional having a true value.
3044 Return true if we are successful in extracting a range from COND and
3045 false if we are unsuccessful. */
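/* For instance, for x_1 <= 10 with an integer type we record the type's
   minimum value as the low bound, 10 as the high bound and a clear
   INVERTED_P; for x_1 != 3 we record [3, 3] with INVERTED_P set, meaning
   the condition is true outside that range.  */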
3047 static bool
3048 extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p)
3050 tree op1 = TREE_OPERAND (cond, 1);
3051 tree high, low, type;
3052 int inverted;
3054 /* Experiments have shown that it's rarely, if ever, useful to
3055 record ranges for enumerations. Presumably this is due to
3056 the fact that they're rarely used directly. They are typically
3057 cast into an integer type and used that way. */
3058 if (TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE)
3059 return 0;
3061 type = TREE_TYPE (op1);
3063 switch (TREE_CODE (cond))
3065 case EQ_EXPR:
3066 high = low = op1;
3067 inverted = 0;
3068 break;
3070 case NE_EXPR:
3071 high = low = op1;
3072 inverted = 1;
3073 break;
3075 case GE_EXPR:
3076 low = op1;
3077 high = TYPE_MAX_VALUE (type);
3078 inverted = 0;
3079 break;
3081 case GT_EXPR:
3082 low = int_const_binop (PLUS_EXPR, op1, integer_one_node, 1);
3083 high = TYPE_MAX_VALUE (type);
3084 inverted = 0;
3085 break;
3087 case LE_EXPR:
3088 high = op1;
3089 low = TYPE_MIN_VALUE (type);
3090 inverted = 0;
3091 break;
3093 case LT_EXPR:
3094 high = int_const_binop (MINUS_EXPR, op1, integer_one_node, 1);
3095 low = TYPE_MIN_VALUE (type);
3096 inverted = 0;
3097 break;
3099 default:
3100 return 0;
3103 *hi_p = high;
3104 *lo_p = low;
3105 *inverted_p = inverted;
3106 return 1;
3109 /* Record a range created by COND for basic block BB. */
3111 static void
3112 record_range (tree cond, basic_block bb, varray_type *vrp_variables_p)
3114 /* We explicitly ignore NE_EXPRs. They rarely allow for meaningful
3115 range optimizations and significantly complicate the implementation. */
3116 if (TREE_CODE_CLASS (TREE_CODE (cond)) == '<'
3117 && TREE_CODE (cond) != NE_EXPR
3118 && TREE_CODE (TREE_TYPE (TREE_OPERAND (cond, 1))) == INTEGER_TYPE)
3120 struct vrp_element *element = ggc_alloc (sizeof (struct vrp_element));
3121 int ssa_version = SSA_NAME_VERSION (TREE_OPERAND (cond, 0));
3123 varray_type *vrp_records_p
3124 = (varray_type *)&VARRAY_GENERIC_PTR (vrp_data, ssa_version);
3126 element->low = NULL;
3127 element->high = NULL;
3128 element->cond = cond;
3129 element->bb = bb;
3131 if (*vrp_records_p == NULL)
3133 VARRAY_GENERIC_PTR_INIT (*vrp_records_p, 2, "vrp records");
3134 VARRAY_GENERIC_PTR (vrp_data, ssa_version) = *vrp_records_p;
3137 VARRAY_PUSH_GENERIC_PTR (*vrp_records_p, element);
3138 if (! *vrp_variables_p)
3139 VARRAY_TREE_INIT (*vrp_variables_p, 2, "vrp_variables");
3140 VARRAY_PUSH_TREE (*vrp_variables_p, TREE_OPERAND (cond, 0));
3144 /* Given a conditional statement IF_STMT, return the assignment 'X = Y'
3145 known to be true depending on which arm of IF_STMT is taken.
3147 Not all conditional statements will result in a useful assignment.
3148 Return NULL_TREE in that case.
3150 Also enter into the available expression table statements of
3151 the form:
3153 TRUE ARM           FALSE ARM
3154 1 = cond           1 = cond'
3155 0 = cond'          0 = cond
3157 This allows us to look up the condition in a dominated block and
3158 get back a constant indicating if the condition is true. */
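/* For instance, for if (x_1 == 42) the true arm yields the equivalence
   x_1 = 42, and the expressions "x_1 == 42" (true) and its inversion
   (false) are entered into the table; the false arm records the opposite
   values.  A bare if (flag_2) yields flag_2 = 1 on the true arm and
   flag_2 = 0 on the false arm.  */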
3160 static struct eq_expr_value
3161 get_eq_expr_value (tree if_stmt,
3162 int true_arm,
3163 varray_type *block_avail_exprs_p,
3164 basic_block bb,
3165 varray_type *vrp_variables_p)
3167 tree cond;
3168 struct eq_expr_value retval;
3170 cond = COND_EXPR_COND (if_stmt);
3171 retval.src = NULL;
3172 retval.dst = NULL;
3174 /* If the conditional is a single variable 'X', return 'X = 1' for
3175 the true arm and 'X = 0' on the false arm. */
3176 if (TREE_CODE (cond) == SSA_NAME)
3178 retval.dst = cond;
3179 retval.src = constant_boolean_node (true_arm, TREE_TYPE (cond));
3180 return retval;
3183 /* If we have a comparison expression, then record its result into
3184 the available expression table. */
3185 if (TREE_CODE_CLASS (TREE_CODE (cond)) == '<')
3187 tree op0 = TREE_OPERAND (cond, 0);
3188 tree op1 = TREE_OPERAND (cond, 1);
3190 /* Special case comparing booleans against a constant as we know
3191 the value of OP0 on both arms of the branch, i.e., we can record
3192 an equivalence for OP0 rather than COND. */
3193 if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
3194 && TREE_CODE (op0) == SSA_NAME
3195 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
3196 && is_gimple_min_invariant (op1))
3198 if ((TREE_CODE (cond) == EQ_EXPR && true_arm)
3199 || (TREE_CODE (cond) == NE_EXPR && ! true_arm))
3201 retval.src = op1;
3203 else
3205 if (integer_zerop (op1))
3206 retval.src = boolean_true_node;
3207 else
3208 retval.src = boolean_false_node;
3210 retval.dst = op0;
3211 return retval;
3214 if (TREE_CODE (op0) == SSA_NAME
3215 && (is_gimple_min_invariant (op1) || TREE_CODE (op1) == SSA_NAME))
3217 tree inverted = invert_truthvalue (cond);
3219 /* When we find an available expression in the hash table, we replace
3220 the expression with the LHS of the statement in the hash table.
3222 So, we want to build statements such as "1 = <condition>" on the
3223 true arm and "0 = <condition>" on the false arm. That way if we
3224 find the expression in the table, we will replace it with its
3225 known constant value. Also insert inversions of the result and
3226 condition into the hash table. */
3227 if (true_arm)
3229 record_cond (cond, boolean_true_node, block_avail_exprs_p);
3230 record_dominating_conditions (cond, block_avail_exprs_p);
3231 record_cond (inverted, boolean_false_node, block_avail_exprs_p);
3233 if (TREE_CONSTANT (op1))
3234 record_range (cond, bb, vrp_variables_p);
3236 /* If the conditional is of the form 'X == Y', return 'X = Y'
3237 for the true arm. */
3238 if (TREE_CODE (cond) == EQ_EXPR)
3240 retval.dst = op0;
3241 retval.src = op1;
3242 return retval;
3245 else
3248 record_cond (inverted, boolean_true_node, block_avail_exprs_p);
3249 record_dominating_conditions (inverted, block_avail_exprs_p);
3250 record_cond (cond, boolean_false_node, block_avail_exprs_p);
3252 if (TREE_CONSTANT (op1))
3253 record_range (inverted, bb, vrp_variables_p);
3255 /* If the conditional is of the form 'X != Y', return 'X = Y'
3256 for the false arm. */
3257 if (TREE_CODE (cond) == NE_EXPR)
3259 retval.dst = op0;
3260 retval.src = op1;
3261 return retval;
3267 return retval;
3270 /* Hashing and equality functions for AVAIL_EXPRS. The table stores
3271 MODIFY_EXPR statements. We compute a value number for expressions using
3272 the code of the expression and the SSA numbers of its operands. */
3274 static hashval_t
3275 avail_expr_hash (const void *p)
3277 stmt_ann_t ann = ((struct expr_hash_elt *)p)->ann;
3278 tree rhs = ((struct expr_hash_elt *)p)->rhs;
3279 hashval_t val = 0;
3280 size_t i;
3281 vuse_optype vuses;
3283 /* iterative_hash_expr knows how to deal with any expression and
3284 deals with commutative operators as well, so just use it instead
3285 of duplicating such complexities here. */
3286 val = iterative_hash_expr (rhs, val);
3288 /* If the hash table entry is not associated with a statement, then we
3289 can just hash the expression and not worry about virtual operands
3290 and such. */
3291 if (!ann)
3292 return val;
3294 /* Add the SSA version numbers of every vuse operand. This is important
3295 because compound variables like arrays are not renamed in the
3296 operands. Rather, the rename is done on the virtual variable
3297 representing all the elements of the array. */
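/* For instance, two loads of a[5] separated by a store into the array
   carry different VUSE versions of the array's virtual variable, so
   they hash to different entries even though the expressions are
   textually identical.  */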
3298 vuses = VUSE_OPS (ann);
3299 for (i = 0; i < NUM_VUSES (vuses); i++)
3300 val = iterative_hash_expr (VUSE_OP (vuses, i), val);
3302 return val;
3305 static hashval_t
3306 real_avail_expr_hash (const void *p)
3308 return ((const struct expr_hash_elt *)p)->hash;
3311 static int
3312 avail_expr_eq (const void *p1, const void *p2)
3314 stmt_ann_t ann1 = ((struct expr_hash_elt *)p1)->ann;
3315 tree rhs1 = ((struct expr_hash_elt *)p1)->rhs;
3316 stmt_ann_t ann2 = ((struct expr_hash_elt *)p2)->ann;
3317 tree rhs2 = ((struct expr_hash_elt *)p2)->rhs;
3319 /* If they are the same physical expression, return true. */
3320 if (rhs1 == rhs2 && ann1 == ann2)
3321 return true;
3323 /* If their codes are not equal, then quit now. */
3324 if (TREE_CODE (rhs1) != TREE_CODE (rhs2))
3325 return false;
3327 /* In case of a collision, both RHSs have to be identical and have the
3328 same VUSE operands. */
3329 if ((TREE_TYPE (rhs1) == TREE_TYPE (rhs2)
3330 || lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2)))
3331 && operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
3333 vuse_optype ops1 = NULL;
3334 vuse_optype ops2 = NULL;
3335 size_t num_ops1 = 0;
3336 size_t num_ops2 = 0;
3337 size_t i;
3339 if (ann1)
3341 ops1 = VUSE_OPS (ann1);
3342 num_ops1 = NUM_VUSES (ops1);
3345 if (ann2)
3347 ops2 = VUSE_OPS (ann2);
3348 num_ops2 = NUM_VUSES (ops2);
3351 /* If the number of virtual uses is different, then we consider
3352 them not equal. */
3353 if (num_ops1 != num_ops2)
3354 return false;
3356 for (i = 0; i < num_ops1; i++)
3357 if (VUSE_OP (ops1, i) != VUSE_OP (ops2, i))
3358 return false;
3360 #ifdef ENABLE_CHECKING
3361 if (((struct expr_hash_elt *)p1)->hash
3362 != ((struct expr_hash_elt *)p2)->hash)
3363 abort ();
3364 #endif
3365 return true;
3368 return false;
3371 /* Given the annotation ANN for a statement and a pointer to the block
3372 local definitions BLOCK_DEFS_P, register all objects set by that
3373 statement into BLOCK_DEFS_P and CURRDEFS. */
3375 static void
3376 register_definitions_for_stmt (stmt_ann_t ann, varray_type *block_defs_p)
3378 def_optype defs;
3379 v_may_def_optype v_may_defs;
3380 v_must_def_optype v_must_defs;
3381 unsigned int i;
3383 defs = DEF_OPS (ann);
3384 for (i = 0; i < NUM_DEFS (defs); i++)
3386 tree def = DEF_OP (defs, i);
3388 /* FIXME: We shouldn't be registering new defs if the variable
3389 doesn't need to be renamed. */
3390 register_new_def (def, block_defs_p);
3393 /* Register new virtual definitions made by the statement. */
3394 v_may_defs = V_MAY_DEF_OPS (ann);
3395 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
3397 /* FIXME: We shouldn't be registering new defs if the variable
3398 doesn't need to be renamed. */
3399 register_new_def (V_MAY_DEF_RESULT (v_may_defs, i), block_defs_p);
3402 /* Register new virtual mustdefs made by the statement. */
3403 v_must_defs = V_MUST_DEF_OPS (ann);
3404 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
3406 /* FIXME: We shouldn't be registering new defs if the variable
3407 doesn't need to be renamed. */
3408 register_new_def (V_MUST_DEF_OP (v_must_defs, i), block_defs_p);