Merge from gomp branch:
[official-gcc.git] / gcc / tree-ssa-dom.c
blob b522fac3e44281e5db59db7448d464c69d04f9b5
1 /* SSA Dominator optimizations for trees
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "ggc.h"
31 #include "basic-block.h"
32 #include "cfgloop.h"
33 #include "output.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "diagnostic.h"
37 #include "timevar.h"
38 #include "tree-dump.h"
39 #include "tree-flow.h"
40 #include "domwalk.h"
41 #include "real.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-propagate.h"
44 #include "langhooks.h"
45 #include "params.h"
47 /* This file implements optimizations on the dominator tree. */
50 /* Structure for recording edge equivalences as well as any pending
51 edge redirections during the dominator optimizer.
53 Computing and storing the edge equivalences instead of creating
54 them on-demand can save significant amounts of time, particularly
55 for pathological cases involving switch statements.
57 These structures live for a single iteration of the dominator
58 optimizer in the edge's AUX field. At the end of an iteration we
59 free each of these structures and update the AUX field to point
60 to any requested redirection target (the code for updating the
61 CFG and SSA graph for edge redirection expects redirection edge
62 targets to be in the AUX field for each edge. */
64 struct edge_info
66 /* If this edge creates a simple equivalence, the LHS and RHS of
67 the equivalence will be stored here. */
68 tree lhs;
69 tree rhs;
71 /* Traversing an edge may also indicate one or more particular conditions
72 are true or false. The number of recorded conditions can vary, but
73 can be determined by the condition's code. So we have an array
74 and its maximum index rather than use a varray. */
75 tree *cond_equivalences;
76 unsigned int max_cond_equivalences;
80 /* Hash table with expressions made available during the renaming process.
81 When an assignment of the form X_i = EXPR is found, the statement is
82 stored in this table. If the same expression EXPR is later found on the
83 RHS of another statement, it is replaced with X_i (thus performing
84 global redundancy elimination). Similarly as we pass through conditionals
85 we record the conditional itself as having either a true or false value
86 in this table. */
87 static htab_t avail_exprs;
89 /* Stack of available expressions in AVAIL_EXPRs. Each block pushes any
90 expressions it enters into the hash table along with a marker entry
91 (null). When we finish processing the block, we pop off entries and
92 remove the expressions from the global hash table until we hit the
93 marker. */
94 static VEC(tree,heap) *avail_exprs_stack;
96 /* Stack of statements we need to rescan during finalization for newly
97 exposed variables.
99 Statement rescanning must occur after the current block's available
100 expressions are removed from AVAIL_EXPRS. Else we may change the
101 hash code for an expression and be unable to find/remove it from
102 AVAIL_EXPRS. */
103 static VEC(tree,heap) *stmts_to_rescan;
105 /* Structure for entries in the expression hash table.
107 This requires more memory for the hash table entries, but allows us
108 to avoid creating silly tree nodes and annotations for conditionals,
109 eliminates 2 global hash tables and two block local varrays.
111 It also allows us to reduce the number of hash table lookups we
112 have to perform in lookup_avail_expr and finally it allows us to
113 significantly reduce the number of calls into the hashing routine
114 itself. */
116 struct expr_hash_elt
118 /* The value (lhs) of this expression. */
119 tree lhs;
121 /* The expression (rhs) we want to record. */
122 tree rhs;
124 /* The stmt pointer if this element corresponds to a statement. */
125 tree stmt;
127 /* The hash value for RHS/ann. */
128 hashval_t hash;
131 /* Stack of dest,src pairs that need to be restored during finalization.
133 A NULL entry is used to mark the end of pairs which need to be
134 restored during finalization of this block. */
135 static VEC(tree,heap) *const_and_copies_stack;
137 /* Bitmap of SSA_NAMEs known to have a nonzero value, even if we do not
138 know their exact value. */
139 static bitmap nonzero_vars;
141 /* Stack of SSA_NAMEs which need their NONZERO_VARS property cleared
142 when the current block is finalized.
144 A NULL entry is used to mark the end of names needing their
145 entry in NONZERO_VARS cleared during finalization of this block. */
146 static VEC(tree,heap) *nonzero_vars_stack;
148 /* Track whether or not we have changed the control flow graph. */
149 static bool cfg_altered;
151 /* Bitmap of blocks that have had EH statements cleaned. We should
152 remove their dead edges eventually. */
153 static bitmap need_eh_cleanup;
/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
  long num_iterations;
};

static struct opt_stats_d opt_stats;
168 /* Value range propagation record. Each time we encounter a conditional
169 of the form SSA_NAME COND CONST we create a new vrp_element to record
170 how the condition affects the possible values SSA_NAME may have.
172 Each record contains the condition tested (COND), and the range of
173 values the variable may legitimately have if COND is true. Note the
174 range of values may be a smaller range than COND specifies if we have
175 recorded other ranges for this variable. Each record also contains the
176 block in which the range was recorded for invalidation purposes.
178 Note that the current known range is computed lazily. This allows us
179 to avoid the overhead of computing ranges which are never queried.
181 When we encounter a conditional, we look for records which constrain
182 the SSA_NAME used in the condition. In some cases those records allow
183 us to determine the condition's result at compile time. In other cases
184 they may allow us to simplify the condition.
186 We also use value ranges to do things like transform signed div/mod
187 operations into unsigned div/mod or to simplify ABS_EXPRs.
189 Simple experiments have shown these optimizations to not be all that
190 useful on switch statements (much to my surprise). So switch statement
191 optimizations are not performed.
193 Note carefully we do not propagate information through each statement
194 in the block. i.e., if we know variable X has a value defined of
195 [0, 25] and we encounter Y = X + 1, we do not track a value range
196 for Y (which would be [1, 26] if we cared). Similarly we do not
197 constrain values as we encounter narrowing typecasts, etc. */
199 struct vrp_element
201 /* The highest and lowest values the variable in COND may contain when
202 COND is true. Note this may not necessarily be the same values
203 tested by COND if the same variable was used in earlier conditionals.
205 Note this is computed lazily and thus can be NULL indicating that
206 the values have not been computed yet. */
207 tree low;
208 tree high;
210 /* The actual conditional we recorded. This is needed since we compute
211 ranges lazily. */
212 tree cond;
214 /* The basic block where this record was created. We use this to determine
215 when to remove records. */
216 basic_block bb;
219 /* A hash table holding value range records (VRP_ELEMENTs) for a given
220 SSA_NAME. We used to use a varray indexed by SSA_NAME_VERSION, but
221 that gets awful wasteful, particularly since the density objects
222 with useful information is very low. */
223 static htab_t vrp_data;
225 typedef struct vrp_element *vrp_element_p;
227 DEF_VEC_P(vrp_element_p);
228 DEF_VEC_ALLOC_P(vrp_element_p,heap);
230 /* An entry in the VRP_DATA hash table. We record the variable and a
231 varray of VRP_ELEMENT records associated with that variable. */
232 struct vrp_hash_elt
234 tree var;
235 VEC(vrp_element_p,heap) *records;
238 /* Array of variables which have their values constrained by operations
239 in this basic block. We use this during finalization to know
240 which variables need their VRP data updated. */
242 /* Stack of SSA_NAMEs which had their values constrained by operations
243 in this basic block. During finalization of this block we use this
244 list to determine which variables need their VRP data updated.
246 A NULL entry marks the end of the SSA_NAMEs associated with this block. */
247 static VEC(tree,heap) *vrp_variables_stack;
249 struct eq_expr_value
251 tree src;
252 tree dst;
255 /* Local functions. */
256 static void optimize_stmt (struct dom_walk_data *,
257 basic_block bb,
258 block_stmt_iterator);
259 static tree lookup_avail_expr (tree, bool);
260 static hashval_t vrp_hash (const void *);
261 static int vrp_eq (const void *, const void *);
262 static hashval_t avail_expr_hash (const void *);
263 static hashval_t real_avail_expr_hash (const void *);
264 static int avail_expr_eq (const void *, const void *);
265 static void htab_statistics (FILE *, htab_t);
266 static void record_cond (tree, tree);
267 static void record_const_or_copy (tree, tree);
268 static void record_equality (tree, tree);
269 static tree simplify_cond_and_lookup_avail_expr (tree);
270 static void record_range (tree, basic_block);
271 static bool extract_range_from_cond (tree, tree *, tree *, int *);
272 static void record_equivalences_from_phis (basic_block);
273 static void record_equivalences_from_incoming_edge (basic_block);
274 static bool eliminate_redundant_computations (tree);
275 static void record_equivalences_from_stmt (tree, int, stmt_ann_t);
276 static void thread_across_edge (struct dom_walk_data *, edge);
277 static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
278 static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
279 static void propagate_to_outgoing_edges (struct dom_walk_data *, basic_block);
280 static void remove_local_expressions_from_table (void);
281 static void restore_vars_to_original_value (void);
282 static edge single_incoming_edge_ignoring_loop_edges (basic_block);
283 static void restore_nonzero_vars_to_original_value (void);
284 static inline bool unsafe_associative_fp_binop (tree);
287 /* Allocate an EDGE_INFO for edge E and attach it to E.
288 Return the new EDGE_INFO structure. */
290 static struct edge_info *
291 allocate_edge_info (edge e)
293 struct edge_info *edge_info;
295 edge_info = XCNEW (struct edge_info);
297 e->aux = edge_info;
298 return edge_info;
301 /* Free all EDGE_INFO structures associated with edges in the CFG.
302 If a particular edge can be threaded, copy the redirection
303 target from the EDGE_INFO structure into the edge's AUX field
304 as required by code to update the CFG and SSA graph for
305 jump threading. */
307 static void
308 free_all_edge_infos (void)
310 basic_block bb;
311 edge_iterator ei;
312 edge e;
314 FOR_EACH_BB (bb)
316 FOR_EACH_EDGE (e, ei, bb->preds)
318 struct edge_info *edge_info = (struct edge_info *) e->aux;
320 if (edge_info)
322 if (edge_info->cond_equivalences)
323 free (edge_info->cond_equivalences);
324 free (edge_info);
325 e->aux = NULL;
331 /* Free an instance of vrp_hash_elt. */
333 static void
334 vrp_free (void *data)
336 struct vrp_hash_elt *elt = (struct vrp_hash_elt *) data;
337 struct VEC(vrp_element_p,heap) **vrp_elt = &elt->records;
339 VEC_free (vrp_element_p, heap, *vrp_elt);
340 free (elt);
343 /* Jump threading, redundancy elimination and const/copy propagation.
345 This pass may expose new symbols that need to be renamed into SSA. For
346 every new symbol exposed, its corresponding bit will be set in
347 VARS_TO_RENAME. */
349 static void
350 tree_ssa_dominator_optimize (void)
352 struct dom_walk_data walk_data;
353 unsigned int i;
354 struct loops loops_info;
356 memset (&opt_stats, 0, sizeof (opt_stats));
358 /* Create our hash tables. */
359 avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
360 vrp_data = htab_create (ceil_log2 (num_ssa_names), vrp_hash, vrp_eq,
361 vrp_free);
362 avail_exprs_stack = VEC_alloc (tree, heap, 20);
363 const_and_copies_stack = VEC_alloc (tree, heap, 20);
364 nonzero_vars_stack = VEC_alloc (tree, heap, 20);
365 vrp_variables_stack = VEC_alloc (tree, heap, 20);
366 stmts_to_rescan = VEC_alloc (tree, heap, 20);
367 nonzero_vars = BITMAP_ALLOC (NULL);
368 need_eh_cleanup = BITMAP_ALLOC (NULL);
370 /* Setup callbacks for the generic dominator tree walker. */
371 walk_data.walk_stmts_backward = false;
372 walk_data.dom_direction = CDI_DOMINATORS;
373 walk_data.initialize_block_local_data = NULL;
374 walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
375 walk_data.before_dom_children_walk_stmts = optimize_stmt;
376 walk_data.before_dom_children_after_stmts = propagate_to_outgoing_edges;
377 walk_data.after_dom_children_before_stmts = NULL;
378 walk_data.after_dom_children_walk_stmts = NULL;
379 walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
380 /* Right now we only attach a dummy COND_EXPR to the global data pointer.
381 When we attach more stuff we'll need to fill this out with a real
382 structure. */
383 walk_data.global_data = NULL;
384 walk_data.block_local_data_size = 0;
385 walk_data.interesting_blocks = NULL;
387 /* Now initialize the dominator walker. */
388 init_walk_dominator_tree (&walk_data);
390 calculate_dominance_info (CDI_DOMINATORS);
392 /* We need to know which edges exit loops so that we can
393 aggressively thread through loop headers to an exit
394 edge. */
395 flow_loops_find (&loops_info);
396 mark_loop_exit_edges (&loops_info);
397 flow_loops_free (&loops_info);
399 /* Clean up the CFG so that any forwarder blocks created by loop
400 canonicalization are removed. */
401 cleanup_tree_cfg ();
402 calculate_dominance_info (CDI_DOMINATORS);
404 /* If we prove certain blocks are unreachable, then we want to
405 repeat the dominator optimization process as PHI nodes may
406 have turned into copies which allows better propagation of
407 values. So we repeat until we do not identify any new unreachable
408 blocks. */
411 /* Optimize the dominator tree. */
412 cfg_altered = false;
414 /* We need accurate information regarding back edges in the CFG
415 for jump threading. */
416 mark_dfs_back_edges ();
418 /* Recursively walk the dominator tree optimizing statements. */
419 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
422 block_stmt_iterator bsi;
423 basic_block bb;
424 FOR_EACH_BB (bb)
426 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
428 update_stmt_if_modified (bsi_stmt (bsi));
433 /* If we exposed any new variables, go ahead and put them into
434 SSA form now, before we handle jump threading. This simplifies
435 interactions between rewriting of _DECL nodes into SSA form
436 and rewriting SSA_NAME nodes into SSA form after block
437 duplication and CFG manipulation. */
438 update_ssa (TODO_update_ssa);
440 free_all_edge_infos ();
442 /* Thread jumps, creating duplicate blocks as needed. */
443 cfg_altered |= thread_through_all_blocks ();
445 /* Removal of statements may make some EH edges dead. Purge
446 such edges from the CFG as needed. */
447 if (!bitmap_empty_p (need_eh_cleanup))
449 cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
450 bitmap_zero (need_eh_cleanup);
453 if (cfg_altered)
454 free_dominance_info (CDI_DOMINATORS);
456 /* Only iterate if we threaded jumps AND the CFG cleanup did
457 something interesting. Other cases generate far fewer
458 optimization opportunities and thus are not worth another
459 full DOM iteration. */
460 cfg_altered &= cleanup_tree_cfg ();
462 if (rediscover_loops_after_threading)
464 /* Rerun basic loop analysis to discover any newly
465 created loops and update the set of exit edges. */
466 rediscover_loops_after_threading = false;
467 flow_loops_find (&loops_info);
468 mark_loop_exit_edges (&loops_info);
469 flow_loops_free (&loops_info);
471 /* Remove any forwarder blocks inserted by loop
472 header canonicalization. */
473 cleanup_tree_cfg ();
476 calculate_dominance_info (CDI_DOMINATORS);
478 update_ssa (TODO_update_ssa);
480 /* Reinitialize the various tables. */
481 bitmap_clear (nonzero_vars);
482 htab_empty (avail_exprs);
483 htab_empty (vrp_data);
485 /* Finally, remove everything except invariants in SSA_NAME_VALUE.
487 This must be done before we iterate as we might have a
488 reference to an SSA_NAME which was removed by the call to
489 update_ssa.
491 Long term we will be able to let everything in SSA_NAME_VALUE
492 persist. However, for now, we know this is the safe thing to do. */
493 for (i = 0; i < num_ssa_names; i++)
495 tree name = ssa_name (i);
496 tree value;
498 if (!name)
499 continue;
501 value = SSA_NAME_VALUE (name);
502 if (value && !is_gimple_min_invariant (value))
503 SSA_NAME_VALUE (name) = NULL;
506 opt_stats.num_iterations++;
508 while (optimize > 1 && cfg_altered);
510 /* Debugging dumps. */
511 if (dump_file && (dump_flags & TDF_STATS))
512 dump_dominator_optimization_stats (dump_file);
514 /* We emptied the hash table earlier, now delete it completely. */
515 htab_delete (avail_exprs);
516 htab_delete (vrp_data);
518 /* It is not necessary to clear CURRDEFS, REDIRECTION_EDGES, VRP_DATA,
519 CONST_AND_COPIES, and NONZERO_VARS as they all get cleared at the bottom
520 of the do-while loop above. */
522 /* And finalize the dominator walker. */
523 fini_walk_dominator_tree (&walk_data);
525 /* Free nonzero_vars. */
526 BITMAP_FREE (nonzero_vars);
527 BITMAP_FREE (need_eh_cleanup);
529 VEC_free (tree, heap, avail_exprs_stack);
530 VEC_free (tree, heap, const_and_copies_stack);
531 VEC_free (tree, heap, nonzero_vars_stack);
532 VEC_free (tree, heap, vrp_variables_stack);
533 VEC_free (tree, heap, stmts_to_rescan);
536 static bool
537 gate_dominator (void)
539 return flag_tree_dom != 0;
542 struct tree_opt_pass pass_dominator =
544 "dom", /* name */
545 gate_dominator, /* gate */
546 tree_ssa_dominator_optimize, /* execute */
547 NULL, /* sub */
548 NULL, /* next */
549 0, /* static_pass_number */
550 TV_TREE_SSA_DOMINATOR_OPTS, /* tv_id */
551 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
552 0, /* properties_provided */
553 0, /* properties_destroyed */
554 0, /* todo_flags_start */
555 TODO_dump_func
556 | TODO_update_ssa
557 | TODO_verify_ssa, /* todo_flags_finish */
558 0 /* letter */
562 /* Given a stmt CONDSTMT containing a COND_EXPR, canonicalize the
563 COND_EXPR into a canonical form. */
565 static void
566 canonicalize_comparison (tree condstmt)
568 tree cond = COND_EXPR_COND (condstmt);
569 tree op0;
570 tree op1;
571 enum tree_code code = TREE_CODE (cond);
573 if (!COMPARISON_CLASS_P (cond))
574 return;
576 op0 = TREE_OPERAND (cond, 0);
577 op1 = TREE_OPERAND (cond, 1);
579 /* If it would be profitable to swap the operands, then do so to
580 canonicalize the statement, enabling better optimization.
582 By placing canonicalization of such expressions here we
583 transparently keep statements in canonical form, even
584 when the statement is modified. */
585 if (tree_swap_operands_p (op0, op1, false))
587 /* For relationals we need to swap the operands
588 and change the code. */
589 if (code == LT_EXPR
590 || code == GT_EXPR
591 || code == LE_EXPR
592 || code == GE_EXPR)
594 TREE_SET_CODE (cond, swap_tree_comparison (code));
595 swap_tree_operands (condstmt,
596 &TREE_OPERAND (cond, 0),
597 &TREE_OPERAND (cond, 1));
598 /* If one operand was in the operand cache, but the other is
599 not, because it is a constant, this is a case that the
600 internal updating code of swap_tree_operands can't handle
601 properly. */
602 if (TREE_CODE_CLASS (TREE_CODE (op0))
603 != TREE_CODE_CLASS (TREE_CODE (op1)))
604 update_stmt (condstmt);
608 /* We are exiting E->src, see if E->dest ends with a conditional
609 jump which has a known value when reached via E.
611 Special care is necessary if E is a back edge in the CFG as we
612 will have already recorded equivalences for E->dest into our
613 various tables, including the result of the conditional at
614 the end of E->dest. Threading opportunities are severely
615 limited in that case to avoid short-circuiting the loop
616 incorrectly.
618 Note it is quite common for the first block inside a loop to
619 end with a conditional which is either always true or always
620 false when reached via the loop backedge. Thus we do not want
621 to blindly disable threading across a loop backedge. */
623 static void
624 thread_across_edge (struct dom_walk_data *walk_data, edge e)
626 block_stmt_iterator bsi;
627 tree stmt = NULL;
628 tree phi;
629 int stmt_count = 0;
630 int max_stmt_count;
633 /* If E->dest does not end with a conditional, then there is
634 nothing to do. */
635 bsi = bsi_last (e->dest);
636 if (bsi_end_p (bsi)
637 || ! bsi_stmt (bsi)
638 || (TREE_CODE (bsi_stmt (bsi)) != COND_EXPR
639 && TREE_CODE (bsi_stmt (bsi)) != GOTO_EXPR
640 && TREE_CODE (bsi_stmt (bsi)) != SWITCH_EXPR))
641 return;
643 /* The basic idea here is to use whatever knowledge we have
644 from our dominator walk to simplify statements in E->dest,
645 with the ultimate goal being to simplify the conditional
646 at the end of E->dest.
648 Note that we must undo any changes we make to the underlying
649 statements as the simplifications we are making are control
650 flow sensitive (ie, the simplifications are valid when we
651 traverse E, but may not be valid on other paths to E->dest. */
653 /* Each PHI creates a temporary equivalence, record them. Again
654 these are context sensitive equivalences and will be removed
655 by our caller. */
656 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
658 tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
659 tree dst = PHI_RESULT (phi);
661 /* Do not include virtual PHIs in our statement count as
662 they never generate code. */
663 if (is_gimple_reg (dst))
664 stmt_count++;
666 /* If the desired argument is not the same as this PHI's result
667 and it is set by a PHI in E->dest, then we can not thread
668 through E->dest. */
669 if (src != dst
670 && TREE_CODE (src) == SSA_NAME
671 && TREE_CODE (SSA_NAME_DEF_STMT (src)) == PHI_NODE
672 && bb_for_stmt (SSA_NAME_DEF_STMT (src)) == e->dest)
673 return;
675 record_const_or_copy (dst, src);
678 /* Try to simplify each statement in E->dest, ultimately leading to
679 a simplification of the COND_EXPR at the end of E->dest.
681 We might consider marking just those statements which ultimately
682 feed the COND_EXPR. It's not clear if the overhead of bookkeeping
683 would be recovered by trying to simplify fewer statements.
685 If we are able to simplify a statement into the form
686 SSA_NAME = (SSA_NAME | gimple invariant), then we can record
687 a context sensitive equivalency which may help us simplify
688 later statements in E->dest.
690 Failure to simplify into the form above merely means that the
691 statement provides no equivalences to help simplify later
692 statements. This does not prevent threading through E->dest. */
693 max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);
694 for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
696 tree cached_lhs = NULL;
698 stmt = bsi_stmt (bsi);
700 /* Ignore empty statements and labels. */
701 if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
702 continue;
704 /* If duplicating this block is going to cause too much code
705 expansion, then do not thread through this block. */
706 stmt_count++;
707 if (stmt_count > max_stmt_count)
708 return;
710 /* Safely handle threading across loop backedges. This is
711 over conservative, but still allows us to capture the
712 majority of the cases where we can thread across a loop
713 backedge. */
714 if ((e->flags & EDGE_DFS_BACK) != 0
715 && TREE_CODE (stmt) != COND_EXPR
716 && TREE_CODE (stmt) != SWITCH_EXPR)
717 return;
719 /* If the statement has volatile operands, then we assume we
720 can not thread through this block. This is overly
721 conservative in some ways. */
722 if (TREE_CODE (stmt) == ASM_EXPR && ASM_VOLATILE_P (stmt))
723 return;
725 /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new
726 value, then do not try to simplify this statement as it will
727 not simplify in any way that is helpful for jump threading. */
728 if (TREE_CODE (stmt) != MODIFY_EXPR
729 || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
730 continue;
732 /* At this point we have a statement which assigns an RHS to an
733 SSA_VAR on the LHS. We want to try and simplify this statement
734 to expose more context sensitive equivalences which in turn may
735 allow us to simplify the condition at the end of the loop. */
736 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
737 cached_lhs = TREE_OPERAND (stmt, 1);
738 else
740 /* Copy the operands. */
741 tree *copy, pre_fold_expr;
742 ssa_op_iter iter;
743 use_operand_p use_p;
744 unsigned int num, i = 0;
746 num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
747 copy = XCNEWVEC (tree, num);
749 /* Make a copy of the uses & vuses into USES_COPY, then cprop into
750 the operands. */
751 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
753 tree tmp = NULL;
754 tree use = USE_FROM_PTR (use_p);
756 copy[i++] = use;
757 if (TREE_CODE (use) == SSA_NAME)
758 tmp = SSA_NAME_VALUE (use);
759 if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
760 SET_USE (use_p, tmp);
763 /* Try to fold/lookup the new expression. Inserting the
764 expression into the hash table is unlikely to help
765 Sadly, we have to handle conditional assignments specially
766 here, because fold expects all the operands of an expression
767 to be folded before the expression itself is folded, but we
768 can't just substitute the folded condition here. */
769 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == COND_EXPR)
771 tree cond = COND_EXPR_COND (TREE_OPERAND (stmt, 1));
772 cond = fold (cond);
773 if (cond == boolean_true_node)
774 pre_fold_expr = COND_EXPR_THEN (TREE_OPERAND (stmt, 1));
775 else if (cond == boolean_false_node)
776 pre_fold_expr = COND_EXPR_ELSE (TREE_OPERAND (stmt, 1));
777 else
778 pre_fold_expr = TREE_OPERAND (stmt, 1);
780 else
781 pre_fold_expr = TREE_OPERAND (stmt, 1);
783 if (pre_fold_expr)
785 cached_lhs = fold (pre_fold_expr);
786 if (TREE_CODE (cached_lhs) != SSA_NAME
787 && !is_gimple_min_invariant (cached_lhs))
788 cached_lhs = lookup_avail_expr (stmt, false);
791 /* Restore the statement's original uses/defs. */
792 i = 0;
793 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
794 SET_USE (use_p, copy[i++]);
796 free (copy);
799 /* Record the context sensitive equivalence if we were able
800 to simplify this statement. */
801 if (cached_lhs
802 && (TREE_CODE (cached_lhs) == SSA_NAME
803 || is_gimple_min_invariant (cached_lhs)))
804 record_const_or_copy (TREE_OPERAND (stmt, 0), cached_lhs);
807 /* If we stopped at a COND_EXPR or SWITCH_EXPR, see if we know which arm
808 will be taken. */
809 if (stmt
810 && (TREE_CODE (stmt) == COND_EXPR
811 || TREE_CODE (stmt) == GOTO_EXPR
812 || TREE_CODE (stmt) == SWITCH_EXPR))
814 tree cond, cached_lhs;
816 /* Now temporarily cprop the operands and try to find the resulting
817 expression in the hash tables. */
818 if (TREE_CODE (stmt) == COND_EXPR)
820 canonicalize_comparison (stmt);
821 cond = COND_EXPR_COND (stmt);
823 else if (TREE_CODE (stmt) == GOTO_EXPR)
824 cond = GOTO_DESTINATION (stmt);
825 else
826 cond = SWITCH_COND (stmt);
828 if (COMPARISON_CLASS_P (cond))
830 tree dummy_cond, op0, op1;
831 enum tree_code cond_code;
833 op0 = TREE_OPERAND (cond, 0);
834 op1 = TREE_OPERAND (cond, 1);
835 cond_code = TREE_CODE (cond);
837 /* Get the current value of both operands. */
838 if (TREE_CODE (op0) == SSA_NAME)
840 tree tmp = SSA_NAME_VALUE (op0);
841 if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
842 op0 = tmp;
845 if (TREE_CODE (op1) == SSA_NAME)
847 tree tmp = SSA_NAME_VALUE (op1);
848 if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
849 op1 = tmp;
852 /* Stuff the operator and operands into our dummy conditional
853 expression, creating the dummy conditional if necessary. */
854 dummy_cond = (tree) walk_data->global_data;
855 if (! dummy_cond)
857 dummy_cond = build2 (cond_code, boolean_type_node, op0, op1);
858 dummy_cond = build3 (COND_EXPR, void_type_node,
859 dummy_cond, NULL_TREE, NULL_TREE);
860 walk_data->global_data = dummy_cond;
862 else
864 TREE_SET_CODE (COND_EXPR_COND (dummy_cond), cond_code);
865 TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op0;
866 TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1) = op1;
869 /* We absolutely do not care about any type conversions
870 we only care about a zero/nonzero value. */
871 cached_lhs = fold (COND_EXPR_COND (dummy_cond));
872 while (TREE_CODE (cached_lhs) == NOP_EXPR
873 || TREE_CODE (cached_lhs) == CONVERT_EXPR
874 || TREE_CODE (cached_lhs) == NON_LVALUE_EXPR)
875 cached_lhs = TREE_OPERAND (cached_lhs, 0);
877 if (! is_gimple_min_invariant (cached_lhs))
879 cached_lhs = lookup_avail_expr (dummy_cond, false);
880 if (!cached_lhs || ! is_gimple_min_invariant (cached_lhs))
881 cached_lhs = simplify_cond_and_lookup_avail_expr (dummy_cond);
884 /* We can have conditionals which just test the state of a
885 variable rather than use a relational operator. These are
886 simpler to handle. */
887 else if (TREE_CODE (cond) == SSA_NAME)
889 cached_lhs = cond;
890 cached_lhs = SSA_NAME_VALUE (cached_lhs);
891 if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
892 cached_lhs = NULL;
894 else
895 cached_lhs = lookup_avail_expr (stmt, false);
897 if (cached_lhs)
899 edge taken_edge = find_taken_edge (e->dest, cached_lhs);
900 basic_block dest = (taken_edge ? taken_edge->dest : NULL);
902 if (dest == e->dest)
903 return;
905 /* If we have a known destination for the conditional, then
906 we can perform this optimization, which saves at least one
907 conditional jump each time it applies since we get to
908 bypass the conditional at our original destination. */
909 if (dest)
911 struct edge_info *edge_info;
913 if (e->aux)
914 edge_info = (struct edge_info *) e->aux;
915 else
916 edge_info = allocate_edge_info (e);
917 register_jump_thread (e, taken_edge);
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

static void
dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
			  basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  The NULL_TREE marker is
     what the corresponding restore/remove routines stop at.  */
  VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
  VEC_safe_push (tree, heap, nonzero_vars_stack, NULL_TREE);
  VEC_safe_push (tree, heap, vrp_variables_stack, NULL_TREE);

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);
}
/* Given an expression EXPR (a relational expression or a statement),
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
{
  /* Hash table elements may be based on conditional expressions or statements.

     For the former case, we have no annotation and we want to hash the
     conditional expression.  In the latter case we have an annotation and
     we want to record the expression the statement evaluates.  */
  if (COMPARISON_CLASS_P (expr) || TREE_CODE (expr) == TRUTH_NOT_EXPR)
    {
      element->stmt = NULL;
      element->rhs = expr;
    }
  else if (TREE_CODE (expr) == COND_EXPR)
    {
      element->stmt = expr;
      element->rhs = COND_EXPR_COND (expr);
    }
  else if (TREE_CODE (expr) == SWITCH_EXPR)
    {
      element->stmt = expr;
      element->rhs = SWITCH_COND (expr);
    }
  else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
    {
      element->stmt = expr;
      /* Operand 1 of the RETURN_EXPR's operand is the returned value
	 (presumably the RHS of a MODIFY_EXPR — verify against the
	 GENERIC representation of returns in this GCC version).  */
      element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1);
    }
  else if (TREE_CODE (expr) == GOTO_EXPR)
    {
      element->stmt = expr;
      element->rhs = GOTO_DESTINATION (expr);
    }
  else
    {
      /* Fallback: an assignment-like statement; hash its RHS.  */
      element->stmt = expr;
      element->rhs = TREE_OPERAND (expr, 1);
    }

  element->lhs = lhs;
  element->hash = avail_expr_hash (element);
}
/* Pop expressions off AVAIL_EXPRS_STACK and remove each one from the
   global AVAIL_EXPRS hash table, stopping at the NULL_TREE marker
   pushed when this block was entered.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (VEC_length (tree, avail_exprs_stack) > 0)
    {
      struct expr_hash_elt element;
      tree expr = VEC_pop (tree, avail_exprs_stack);

      /* NULL_TREE is the unwind marker for the current block.  */
      if (expr == NULL_TREE)
	break;

      /* Rebuild the hash element so we can find and remove the
	 corresponding table entry.  */
      initialize_hash_element (expr, NULL, &element);
      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
    }
}
1014 /* Use the SSA_NAMES in LOCALS to restore TABLE to its original
1015 state, stopping when there are LIMIT entries left in LOCALs. */
1017 static void
1018 restore_nonzero_vars_to_original_value (void)
1020 while (VEC_length (tree, nonzero_vars_stack) > 0)
1022 tree name = VEC_pop (tree, nonzero_vars_stack);
1024 if (name == NULL)
1025 break;
1027 bitmap_clear_bit (nonzero_vars, SSA_NAME_VERSION (name));
1031 /* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
1032 CONST_AND_COPIES to its original state, stopping when we hit a
1033 NULL marker. */
1035 static void
1036 restore_vars_to_original_value (void)
1038 while (VEC_length (tree, const_and_copies_stack) > 0)
1040 tree prev_value, dest;
1042 dest = VEC_pop (tree, const_and_copies_stack);
1044 if (dest == NULL)
1045 break;
1047 prev_value = VEC_pop (tree, const_and_copies_stack);
1048 SSA_NAME_VALUE (dest) = prev_value;
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  */

static void
dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
{
  tree last;

  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge.  ie, we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && !single_pred_p (single_succ (bb))
      && !single_succ_p (single_succ (bb)))
    {
      thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
	   && TREE_CODE (last) == COND_EXPR
	   && (COMPARISON_CLASS_P (COND_EXPR_COND (last))
	       || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
	   && EDGE_COUNT (bb->succs) == 2
	   && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
	   && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
	 more than one predecessor and more than one successor.  */
      if (!single_pred_p (true_edge->dest) && !single_succ_p (true_edge->dest))
	{
	  struct edge_info *edge_info;
	  unsigned int i;

	  /* Push a marker onto the available expression stack so that we
	     unwind any expressions related to the TRUE arm before processing
	     the false arm below.  */
	  VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
	  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

	  edge_info = (struct edge_info *) true_edge->aux;

	  /* If we have info associated with this edge, record it into
	     our equivalency tables.  */
	  if (edge_info)
	    {
	      tree *cond_equivalences = edge_info->cond_equivalences;
	      tree lhs = edge_info->lhs;
	      tree rhs = edge_info->rhs;

	      /* If we have a simple NAME = VALUE equivalency record it.  */
	      if (lhs && TREE_CODE (lhs) == SSA_NAME)
		record_const_or_copy (lhs, rhs);

	      /* If we have 0 = COND or 1 = COND equivalences, record them
		 into our expression hash tables.  Entries come in
		 (expression, value) pairs.  */
	      if (cond_equivalences)
		for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
		  {
		    tree expr = cond_equivalences[i];
		    tree value = cond_equivalences[i + 1];

		    record_cond (expr, value);
		  }
	    }

	  /* Now thread the edge.  */
	  thread_across_edge (walk_data, true_edge);

	  /* And restore the various tables to their state before
	     we threaded this edge.  */
	  remove_local_expressions_from_table ();
	  restore_vars_to_original_value ();
	}

      /* Similarly for the ELSE arm.  */
      if (!single_pred_p (false_edge->dest) && !single_succ_p (false_edge->dest))
	{
	  struct edge_info *edge_info;
	  unsigned int i;

	  edge_info = (struct edge_info *) false_edge->aux;

	  /* If we have info associated with this edge, record it into
	     our equivalency tables.  */
	  if (edge_info)
	    {
	      tree *cond_equivalences = edge_info->cond_equivalences;
	      tree lhs = edge_info->lhs;
	      tree rhs = edge_info->rhs;

	      /* If we have a simple NAME = VALUE equivalency record it.  */
	      if (lhs && TREE_CODE (lhs) == SSA_NAME)
		record_const_or_copy (lhs, rhs);

	      /* If we have 0 = COND or 1 = COND equivalences, record them
		 into our expression hash tables.  */
	      if (cond_equivalences)
		for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
		  {
		    tree expr = cond_equivalences[i];
		    tree value = cond_equivalences[i + 1];

		    record_cond (expr, value);
		  }
	    }

	  thread_across_edge (walk_data, false_edge);

	  /* No need to remove local expressions from our tables
	     or restore vars to their original value as that will
	     be done immediately below.  */
	}
    }

  /* Unwind everything recorded while processing BB and its children.  */
  remove_local_expressions_from_table ();
  restore_nonzero_vars_to_original_value ();
  restore_vars_to_original_value ();

  /* Remove VRP records associated with this basic block.  They are no
     longer valid.

     To be efficient, we note which variables have had their values
     constrained in this block.  So walk over each variable in the
     VRP_VARIABLEs array.  */
  while (VEC_length (tree, vrp_variables_stack) > 0)
    {
      tree var = VEC_pop (tree, vrp_variables_stack);
      struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
      void **slot;

      /* Each variable has a stack of value range records.  We want to
	 invalidate those associated with our basic block.  So we walk
	 the array backwards popping off records associated with our
	 block.  Once we hit a record not associated with our block
	 we are done.  */
      VEC(vrp_element_p,heap) **var_vrp_records;

      /* NULL is the unwind marker for the current block.  */
      if (var == NULL)
	break;

      vrp_hash_elt.var = var;
      vrp_hash_elt.records = NULL;

      slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);

      vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
      var_vrp_records = &vrp_hash_elt_p->records;

      while (VEC_length (vrp_element_p, *var_vrp_records) > 0)
	{
	  struct vrp_element *element
	    = VEC_last (vrp_element_p, *var_vrp_records);

	  if (element->bb != bb)
	    break;

	  VEC_pop (vrp_element_p, *var_vrp_records);
	}
    }

  /* If we queued any statements to rescan in this block, then
     go ahead and rescan them now.  */
  while (VEC_length (tree, stmts_to_rescan) > 0)
    {
      tree stmt = VEC_last (tree, stmts_to_rescan);
      basic_block stmt_bb = bb_for_stmt (stmt);

      /* Statements queued by dominated blocks have already been
	 processed; stop at the first one outside BB.  */
      if (stmt_bb != bb)
	break;

      VEC_pop (tree, stmts_to_rescan);
      mark_new_vars_to_rename (stmt);
    }
}
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.

   Additionally, if all the PHI alternatives are known to have a nonzero
   value, then the result of this PHI is known to have a nonzero value,
   even if we do not know its exact value.  */

static void
record_equivalences_from_phis (basic_block bb)
{
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree lhs = PHI_RESULT (phi);
      tree rhs = NULL;
      int i;

      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	{
	  tree t = PHI_ARG_DEF (phi, i);

	  /* Ignore alternatives which are the same as our LHS.  Since
	     LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
	     can simply compare pointers.  */
	  if (lhs == t)
	    continue;

	  /* If we have not processed an alternative yet, then set
	     RHS to this alternative.  */
	  if (rhs == NULL)
	    rhs = t;
	  /* If we have processed an alternative (stored in RHS), then
	     see if it is equal to this one.  If it isn't, then stop
	     the search.  */
	  else if (! operand_equal_for_phi_arg_p (rhs, t))
	    break;
	}

      /* If we had no interesting alternatives, then all the RHS alternatives
	 must have been the same as LHS.  */
      if (!rhs)
	rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
	 breaking out of the loop, then we have a PHI which may create
	 a useful equivalence.  We do not need to record unwind data for
	 this, since this is a true assignment and not an equivalence
	 inferred from a comparison.  All uses of this ssa name are dominated
	 by this assignment, so unwinding just costs time and space.  */
      if (i == PHI_NUM_ARGS (phi)
	  && may_propagate_copy (lhs, rhs))
	SSA_NAME_VALUE (lhs) = rhs;

      /* Now see if we know anything about the nonzero property for the
	 result of this PHI.  */
      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	{
	  if (!PHI_ARG_NONZERO (phi, i))
	    break;
	}

      /* All arguments nonzero implies the result is nonzero.  */
      if (i == PHI_NUM_ARGS (phi))
	bitmap_set_bit (nonzero_vars, SSA_NAME_VERSION (PHI_RESULT (phi)));
    }
}
1304 /* Ignoring loop backedges, if BB has precisely one incoming edge then
1305 return that edge. Otherwise return NULL. */
1306 static edge
1307 single_incoming_edge_ignoring_loop_edges (basic_block bb)
1309 edge retval = NULL;
1310 edge e;
1311 edge_iterator ei;
1313 FOR_EACH_EDGE (e, ei, bb->preds)
1315 /* A loop back edge can be identified by the destination of
1316 the edge dominating the source of the edge. */
1317 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
1318 continue;
1320 /* If we have already seen a non-loop edge, then we must have
1321 multiple incoming non-loop edges and thus we return NULL. */
1322 if (retval)
1323 return NULL;
1325 /* This is the first non-loop incoming edge we have found. Record
1326 it. */
1327 retval = e;
1330 return retval;
/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
	{
	  tree lhs = edge_info->lhs;
	  tree rhs = edge_info->rhs;
	  tree *cond_equivalences = edge_info->cond_equivalences;

	  if (lhs)
	    record_equality (lhs, rhs);

	  if (cond_equivalences)
	    {
	      bool recorded_range = false;

	      /* Entries come in (expression, value) pairs.  */
	      for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
		{
		  tree expr = cond_equivalences[i];
		  tree value = cond_equivalences[i + 1];

		  record_cond (expr, value);

		  /* For the first true equivalence, record range
		     information.  We only do this for the first
		     true equivalence as it should dominate any
		     later true equivalences.  */
		  if (! recorded_range
		      && COMPARISON_CLASS_P (expr)
		      && value == boolean_true_node
		      && TREE_CONSTANT (TREE_OPERAND (expr, 1)))
		    {
		      record_range (expr, bb);
		      recorded_range = true;
		    }
		}
	    }
	}
    }
}
/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  long n_exprs;

  fprintf (file, "Total number of statements: %6ld\n\n",
	   opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
	   opt_stats.num_exprs_considered);

  /* Avoid division by zero in the PERCENT computation below.  */
  n_exprs = opt_stats.num_exprs_considered;
  if (n_exprs == 0)
    n_exprs = 1;

  fprintf (file, " Redundant expressions eliminated: %6ld (%.0f%%)\n",
	   opt_stats.num_re, PERCENT (opt_stats.num_re,
				      n_exprs));
  fprintf (file, " Constants propagated: %6ld\n",
	   opt_stats.num_const_prop);
  fprintf (file, " Copies propagated: %6ld\n",
	   opt_stats.num_copy_prop);

  fprintf (file, "\nTotal number of DOM iterations: %6ld\n",
	   opt_stats.num_iterations);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, " avail_exprs: ");
  htab_statistics (file, avail_exprs);
}
/* Dump SSA statistics on stderr.  Convenience wrapper for use from a
   debugger.  */

void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}
/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, htab_t htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
	   (long) htab_size (htab),
	   (long) htab_elements (htab),
	   htab_collisions (htab));
}
1449 /* Record the fact that VAR has a nonzero value, though we may not know
1450 its exact value. Note that if VAR is already known to have a nonzero
1451 value, then we do nothing. */
1453 static void
1454 record_var_is_nonzero (tree var)
1456 int indx = SSA_NAME_VERSION (var);
1458 if (bitmap_bit_p (nonzero_vars, indx))
1459 return;
1461 /* Mark it in the global table. */
1462 bitmap_set_bit (nonzero_vars, indx);
1464 /* Record this SSA_NAME so that we can reset the global table
1465 when we leave this block. */
1466 VEC_safe_push (tree, heap, nonzero_vars_stack, var);
/* Enter a statement into the true/false expression hash table indicating
   that the condition COND has the value VALUE.  */

static void
record_cond (tree cond, tree value)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  void **slot;

  initialize_hash_element (cond, value, element);

  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
				   element->hash, INSERT);
  if (*slot == NULL)
    {
      /* New entry: the table owns ELEMENT; remember COND so the entry
	 can be removed when we leave this block.  */
      *slot = (void *) element;
      VEC_safe_push (tree, heap, avail_exprs_stack, cond);
    }
  else
    /* An equivalent condition is already recorded; discard ours.  */
    free (element);
}
1491 /* Build a new conditional using NEW_CODE, OP0 and OP1 and store
1492 the new conditional into *p, then store a boolean_true_node
1493 into *(p + 1). */
1495 static void
1496 build_and_record_new_cond (enum tree_code new_code, tree op0, tree op1, tree *p)
1498 *p = build2 (new_code, boolean_type_node, op0, op1);
1499 p++;
1500 *p = boolean_true_node;
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.

   The cond_equivalences array is laid out as (condition, value) pairs;
   slots 0-3 hold COND/true and INVERTED/false, later pairs hold the
   derived conditions, each paired with boolean_true_node.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      edge_info->max_cond_equivalences = 12;
      edge_info->cond_equivalences = XNEWVEC (tree, 12);
      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
				  ? LE_EXPR : GE_EXPR),
				 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[8]);
      build_and_record_new_cond (LTGT_EXPR, op0, op1,
				 &edge_info->cond_equivalences[10]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      edge_info->max_cond_equivalences = 6;
      edge_info->cond_equivalences = XNEWVEC (tree, 6);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      break;

    case EQ_EXPR:
      edge_info->max_cond_equivalences = 10;
      edge_info->cond_equivalences = XNEWVEC (tree, 10);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (LE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[8]);
      break;

    case UNORDERED_EXPR:
      edge_info->max_cond_equivalences = 16;
      edge_info->cond_equivalences = XNEWVEC (tree, 16);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[8]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
				 &edge_info->cond_equivalences[10]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
				 &edge_info->cond_equivalences[12]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
				 &edge_info->cond_equivalences[14]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (tree, 8);
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
				  ? UNLE_EXPR : UNGE_EXPR),
				 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      break;

    case UNEQ_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (tree, 8);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      break;

    case LTGT_EXPR:
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (tree, 8);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      break;

    default:
      /* No derived conditions; record only COND and INVERTED below.  */
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (tree, 4);
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  edge_info->cond_equivalences[0] = cond;
  edge_info->cond_equivalences[1] = boolean_true_node;
  edge_info->cond_equivalences[2] = inverted;
  edge_info->cond_equivalences[3] = boolean_false_node;
}
/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  SSA_NAME_VALUE (x) = y;

  /* Push (previous value, name) so restore_vars_to_original_value can
     undo this when we leave the block.  */
  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}
/* Return the loop depth of the basic block of the defining statement of X.
   This number should not be treated as absolutely correct because the loop
   information may not be completely up-to-date when dom runs.  However, it
   will be relatively correct, and as more passes are taught to keep loop info
   up to date, the result will become more and more accurate.  */

/* NOTE(review): the return-type line was lost in extraction; `static int`
   reconstructed — confirm against the original file.  */
static int
loop_depth_of_name (tree x)
{
  tree defstmt;
  basic_block defbb;

  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
  if (TREE_CODE (x) != SSA_NAME)
    return 0;

  /* Otherwise return the loop depth of the defining statement's bb.
     Note that there may not actually be a bb for this statement, if the
     ssa_name is live on entry.  */
  defstmt = SSA_NAME_DEF_STMT (x);
  defbb = bb_for_stmt (defstmt);
  if (!defbb)
    return 0;

  return defbb->loop_depth;
}
1658 /* Record that X is equal to Y in const_and_copies. Record undo
1659 information in the block-local vector. */
1661 static void
1662 record_const_or_copy (tree x, tree y)
1664 tree prev_x = SSA_NAME_VALUE (x);
1666 if (TREE_CODE (y) == SSA_NAME)
1668 tree tmp = SSA_NAME_VALUE (y);
1669 if (tmp)
1670 y = tmp;
1673 record_const_or_copy_1 (x, y, prev_x);
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (TREE_INVARIANT (y))
    ;
  else if (TREE_INVARIANT (x) || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && TREE_INVARIANT (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y && TREE_CODE (prev_y) != VALUE_HANDLE)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
	  || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
1718 /* Return true, if it is ok to do folding of an associative expression.
1719 EXP is the tree for the associative expression. */
1721 static inline bool
1722 unsafe_associative_fp_binop (tree exp)
1724 enum tree_code code = TREE_CODE (exp);
1725 return !(!flag_unsafe_math_optimizations
1726 && (code == MULT_EXPR || code == PLUS_EXPR
1727 || code == MINUS_EXPR)
1728 && FLOAT_TYPE_P (TREE_TYPE (exp)));
/* Returns true when STMT is a simple iv increment.  It detects the
   following situation:

   i_1 = phi (..., i_2)
   i_2 = i_1 +/- ...  */

static bool
simple_iv_increment_p (tree stmt)
{
  tree lhs, rhs, preinc, phi;
  unsigned i;

  if (TREE_CODE (stmt) != MODIFY_EXPR)
    return false;

  lhs = TREE_OPERAND (stmt, 0);
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  rhs = TREE_OPERAND (stmt, 1);

  if (TREE_CODE (rhs) != PLUS_EXPR
      && TREE_CODE (rhs) != MINUS_EXPR)
    return false;

  /* The incremented value must itself be an SSA_NAME defined by a
     PHI node ... */
  preinc = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (preinc) != SSA_NAME)
    return false;

  phi = SSA_NAME_DEF_STMT (preinc);
  if (TREE_CODE (phi) != PHI_NODE)
    return false;

  /* ... and that PHI must have our LHS as one of its arguments,
     closing the increment cycle.  */
  for (i = 0; i < (unsigned) PHI_NUM_ARGS (phi); i++)
    if (PHI_ARG_DEF (phi, i) == lhs)
      return true;

  return false;
}
/* STMT is a COND_EXPR for which we could not trivially determine its
   result.  This routine attempts to find equivalent forms of the
   condition which we may be able to optimize better.  It also
   uses simple value range propagation to optimize conditionals.

   Returns boolean_true_node/boolean_false_node when the recorded range
   decides the condition, otherwise 0; may also canonicalize COND in
   place into an EQ_EXPR when the range converges to one value.  */

static tree
simplify_cond_and_lookup_avail_expr (tree stmt)
{
  tree cond = COND_EXPR_COND (stmt);

  if (COMPARISON_CLASS_P (cond))
    {
      tree op0 = TREE_OPERAND (cond, 0);
      tree op1 = TREE_OPERAND (cond, 1);

      if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
	{
	  int limit;
	  tree low, high, cond_low, cond_high;
	  int lowequal, highequal, swapped, no_overlap, subset, cond_inverted;
	  VEC(vrp_element_p,heap) **vrp_records;
	  struct vrp_element *element;
	  struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
	  void **slot;

	  /* Consult the value range records for this variable (if they exist)
	     to see if we can eliminate or simplify this conditional.

	     Note two tests are necessary to determine no records exist.
	     First we have to see if the virtual array exists, if it
	     exists, then we have to check its active size.

	     Also note the vast majority of conditionals are not testing
	     a variable which has had its range constrained by an earlier
	     conditional.  So this filter avoids a lot of unnecessary work.  */
	  vrp_hash_elt.var = op0;
	  vrp_hash_elt.records = NULL;
	  slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);
	  if (slot == NULL)
	    return NULL;

	  vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
	  vrp_records = &vrp_hash_elt_p->records;

	  limit = VEC_length (vrp_element_p, *vrp_records);

	  /* If we have no value range records for this variable, or we are
	     unable to extract a range for this condition, then there is
	     nothing to do.  */
	  if (limit == 0
	      || ! extract_range_from_cond (cond, &cond_high,
					    &cond_low, &cond_inverted))
	    return NULL;

	  /* We really want to avoid unnecessary computations of range
	     info.  So all ranges are computed lazily; this avoids a
	     lot of unnecessary work.  i.e., we record the conditional,
	     but do not process how it constrains the variable's
	     potential values until we know that processing the condition
	     could be helpful.

	     However, we do not want to have to walk a potentially long
	     list of ranges, nor do we want to compute a variable's
	     range more than once for a given path.

	     Luckily, each time we encounter a conditional that can not
	     be otherwise optimized we will end up here and we will
	     compute the necessary range information for the variable
	     used in this condition.

	     Thus you can conclude that there will never be more than one
	     conditional associated with a variable which has not been
	     processed.  So we never need to merge more than one new
	     conditional into the current range.

	     These properties also help us avoid unnecessary work.  */
	  element = VEC_last (vrp_element_p, *vrp_records);

	  if (element->high && element->low)
	    {
	      /* The last element has been processed, so there is no range
		 merging to do, we can simply use the high/low values
		 recorded in the last element.  */
	      low = element->low;
	      high = element->high;
	    }
	  else
	    {
	      tree tmp_high, tmp_low;
	      int dummy;

	      /* The last element has not been processed.  Process it now.
		 record_range should ensure for cond inverted is not set.
		 This call can only fail if cond is x < min or x > max,
		 which fold should have optimized into false.
		 If that doesn't happen, just pretend all values are
		 in the range.  */
	      if (! extract_range_from_cond (element->cond, &tmp_high,
					     &tmp_low, &dummy))
		gcc_unreachable ();
	      else
		gcc_assert (dummy == 0);

	      /* If this is the only element, then no merging is necessary,
		 the high/low values from extract_range_from_cond are all
		 we need.  */
	      if (limit == 1)
		{
		  low = tmp_low;
		  high = tmp_high;
		}
	      else
		{
		  /* Get the high/low value from the previous element.  */
		  struct vrp_element *prev
		    = VEC_index (vrp_element_p, *vrp_records, limit - 2);
		  low = prev->low;
		  high = prev->high;

		  /* Merge in this element's range with the range from the
		     previous element.

		     The low value for the merged range is the maximum of
		     the previous low value and the low value of this record.

		     Similarly the high value for the merged range is the
		     minimum of the previous high value and the high value of
		     this record.  */
		  low = (low && tree_int_cst_compare (low, tmp_low) == 1
			 ? low : tmp_low);
		  high = (high && tree_int_cst_compare (high, tmp_high) == -1
			  ? high : tmp_high);
		}

	      /* And record the computed range.  */
	      element->low = low;
	      element->high = high;
	    }

	  /* After we have constrained this variable's potential values,
	     we try to determine the result of the given conditional.

	     To simplify later tests, first determine if the current
	     low value is the same low value as the conditional.
	     Similarly for the current high value and the high value
	     for the conditional.  */
	  lowequal = tree_int_cst_equal (low, cond_low);
	  highequal = tree_int_cst_equal (high, cond_high);

	  if (lowequal && highequal)
	    return (cond_inverted ? boolean_false_node : boolean_true_node);

	  /* To simplify the overlap/subset tests below we may want
	     to swap the two ranges so that the larger of the two
	     ranges occurs "first".  */
	  swapped = 0;
	  if (tree_int_cst_compare (low, cond_low) == 1
	      || (lowequal
		  && tree_int_cst_compare (cond_high, high) == 1))
	    {
	      tree temp;

	      swapped = 1;
	      temp = low;
	      low = cond_low;
	      cond_low = temp;
	      temp = high;
	      high = cond_high;
	      cond_high = temp;
	    }

	  /* Now determine if there is no overlap in the ranges
	     or if the second range is a subset of the first range.  */
	  no_overlap = tree_int_cst_lt (high, cond_low);
	  subset = tree_int_cst_compare (cond_high, high) != 1;

	  /* If there was no overlap in the ranges, then this conditional
	     always has a false value (unless we had to invert this
	     conditional, in which case it always has a true value).  */
	  if (no_overlap)
	    return (cond_inverted ? boolean_true_node : boolean_false_node);

	  /* If the current range is a subset of the condition's range,
	     then this conditional always has a true value (unless we
	     had to invert this conditional, in which case it always
	     has a false value).  */
	  if (subset && swapped)
	    return (cond_inverted ? boolean_false_node : boolean_true_node);

	  /* We were unable to determine the result of the conditional.
	     However, we may be able to simplify the conditional.  First
	     merge the ranges in the same manner as range merging above.  */
	  low = tree_int_cst_compare (low, cond_low) == 1 ? low : cond_low;
	  high = tree_int_cst_compare (high, cond_high) == -1 ? high : cond_high;

	  /* If the range has converged to a single point, then turn this
	     into an equality comparison.  */
	  if (TREE_CODE (cond) != EQ_EXPR
	      && TREE_CODE (cond) != NE_EXPR
	      && tree_int_cst_equal (low, high))
	    {
	      TREE_SET_CODE (cond, EQ_EXPR);
	      TREE_OPERAND (cond, 1) = high;
	    }
	}
    }
  return 0;
}
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   NONZERO_VARS is the set SSA_NAMES known to have a nonzero value,
   even if we don't know their precise value.

   Propagate values from CONST_AND_COPIES and NONZERO_VARS into the PHI
   nodes of the successors of BB.  */

static void
cprop_into_successor_phis (basic_block bb, bitmap nonzero_vars)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      tree phi;
      int indx;

      /* If this is an abnormal edge, then we do not want to copy propagate
	 into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
	continue;

      phi = phi_nodes (e->dest);
      if (! phi)
	continue;

      /* All PHIs in E->dest use the same argument index for this edge.  */
      indx = e->dest_idx;
      for ( ; phi; phi = PHI_CHAIN (phi))
	{
	  tree new;
	  use_operand_p orig_p;
	  tree orig;

	  /* The alternative may be associated with a constant, so verify
	     it is an SSA_NAME before doing anything with it.  */
	  orig_p = PHI_ARG_DEF_PTR (phi, indx);
	  orig = USE_FROM_PTR (orig_p);
	  if (TREE_CODE (orig) != SSA_NAME)
	    continue;

	  /* If the alternative is known to have a nonzero value, record
	     that fact in the PHI node itself for future use.  */
	  if (bitmap_bit_p (nonzero_vars, SSA_NAME_VERSION (orig)))
	    PHI_ARG_NONZERO (phi, indx) = true;

	  /* If we have *ORIG_P in our constant/copy table, then replace
	     ORIG_P with its value in our constant/copy table.  */
	  new = SSA_NAME_VALUE (orig);
	  if (new
	      && new != orig
	      && (TREE_CODE (new) == SSA_NAME
		  || is_gimple_min_invariant (new))
	      && may_propagate_copy (orig, new))
	    propagate_value (orig_p, new);
	}
    }
}
2042 /* We have finished optimizing BB, record any information implied by
2043 taking a specific outgoing edge from BB. */
2045 static void
2046 record_edge_info (basic_block bb)
2048 block_stmt_iterator bsi = bsi_last (bb);
2049 struct edge_info *edge_info;
2051 if (! bsi_end_p (bsi))
2053 tree stmt = bsi_stmt (bsi);
/* A SWITCH_EXPR on an SSA_NAME lets us record, for each outgoing edge
   whose destination is reached by exactly one simple (single-value)
   case label, the equivalence COND == CASE_LOW on that edge.  */
2055 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
2057 tree cond = SWITCH_COND (stmt);
2059 if (TREE_CODE (cond) == SSA_NAME)
2061 tree labels = SWITCH_LABELS (stmt);
2062 int i, n_labels = TREE_VEC_LENGTH (labels);
/* INFO maps a basic block index to the unique simple case label
   targeting it, or error_mark_node if no usable equivalence exists
   (range label, default label, or multiple labels per target).  */
2063 tree *info = XCNEWVEC (tree, last_basic_block);
2064 edge e;
2065 edge_iterator ei;
2067 for (i = 0; i < n_labels; i++)
2069 tree label = TREE_VEC_ELT (labels, i);
2070 basic_block target_bb = label_to_block (CASE_LABEL (label));
/* Disqualify this target if the label is a case range (CASE_HIGH
   set), the default label (no CASE_LOW), or the target was already
   claimed by another label.  */
2072 if (CASE_HIGH (label)
2073 || !CASE_LOW (label)
2074 || info[target_bb->index])
2075 info[target_bb->index] = error_mark_node;
2076 else
2077 info[target_bb->index] = label;
2080 FOR_EACH_EDGE (e, ei, bb->succs)
2082 basic_block target_bb = e->dest;
2083 tree node = info[target_bb->index];
2085 if (node != NULL && node != error_mark_node)
2087 tree x = fold_convert (TREE_TYPE (cond), CASE_LOW (node));
2088 edge_info = allocate_edge_info (e);
2089 edge_info->lhs = cond;
2090 edge_info->rhs = x;
2093 free (info);
2097 /* A COND_EXPR may create equivalences too. */
2098 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2100 tree cond = COND_EXPR_COND (stmt);
2101 edge true_edge;
2102 edge false_edge;
2104 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2106 /* If the conditional is a single variable 'X', record 'X = 1'
2107 for the true edge and 'X = 0' on the false edge. */
2108 if (SSA_VAR_P (cond))
2110 struct edge_info *edge_info;
2112 edge_info = allocate_edge_info (true_edge);
2113 edge_info->lhs = cond;
2114 edge_info->rhs = constant_boolean_node (1, TREE_TYPE (cond));
2116 edge_info = allocate_edge_info (false_edge);
2117 edge_info->lhs = cond;
2118 edge_info->rhs = constant_boolean_node (0, TREE_TYPE (cond));
2120 /* Equality tests may create one or two equivalences. */
2121 else if (COMPARISON_CLASS_P (cond))
2123 tree op0 = TREE_OPERAND (cond, 0);
2124 tree op1 = TREE_OPERAND (cond, 1);
2126 /* Special case comparing booleans against a constant as we
2127 know the value of OP0 on both arms of the branch. i.e., we
2128 can record an equivalence for OP0 rather than COND. */
2129 if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
2130 && TREE_CODE (op0) == SSA_NAME
2131 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2132 && is_gimple_min_invariant (op1))
2134 if (TREE_CODE (cond) == EQ_EXPR)
2136 edge_info = allocate_edge_info (true_edge);
2137 edge_info->lhs = op0;
2138 edge_info->rhs = (integer_zerop (op1)
2139 ? boolean_false_node
2140 : boolean_true_node);
2142 edge_info = allocate_edge_info (false_edge);
2143 edge_info->lhs = op0;
2144 edge_info->rhs = (integer_zerop (op1)
2145 ? boolean_true_node
2146 : boolean_false_node);
/* NE_EXPR: same as above with the true/false values swapped.  */
2148 else
2150 edge_info = allocate_edge_info (true_edge);
2151 edge_info->lhs = op0;
2152 edge_info->rhs = (integer_zerop (op1)
2153 ? boolean_true_node
2154 : boolean_false_node);
2156 edge_info = allocate_edge_info (false_edge);
2157 edge_info->lhs = op0;
2158 edge_info->rhs = (integer_zerop (op1)
2159 ? boolean_false_node
2160 : boolean_true_node);
/* Invariant OP0 compared against SSA_NAME/invariant OP1: record the
   condition (and its inversion) on the respective edges, plus a copy
   equivalence OP1 = OP0 when the comparison pins OP1's value.  */
2164 else if (is_gimple_min_invariant (op0)
2165 && (TREE_CODE (op1) == SSA_NAME
2166 || is_gimple_min_invariant (op1)))
2168 tree inverted = invert_truthvalue (cond);
2169 struct edge_info *edge_info;
2171 edge_info = allocate_edge_info (true_edge);
2172 record_conditions (edge_info, cond, inverted);
2174 if (TREE_CODE (cond) == EQ_EXPR)
2176 edge_info->lhs = op1;
2177 edge_info->rhs = op0;
2180 edge_info = allocate_edge_info (false_edge);
2181 record_conditions (edge_info, inverted, cond);
2183 if (TREE_CODE (cond) == NE_EXPR)
2185 edge_info->lhs = op1;
2186 edge_info->rhs = op0;
/* Mirror case: SSA_NAME OP0 compared against OP1.  */
2190 else if (TREE_CODE (op0) == SSA_NAME
2191 && (is_gimple_min_invariant (op1)
2192 || TREE_CODE (op1) == SSA_NAME))
2194 tree inverted = invert_truthvalue (cond);
2195 struct edge_info *edge_info;
2197 edge_info = allocate_edge_info (true_edge);
2198 record_conditions (edge_info, cond, inverted);
2200 if (TREE_CODE (cond) == EQ_EXPR)
2202 edge_info->lhs = op0;
2203 edge_info->rhs = op1;
2206 edge_info = allocate_edge_info (false_edge);
2207 record_conditions (edge_info, inverted, cond);
2209 if (TREE_CODE (cond) == NE_EXPR)
2211 edge_info->lhs = op0;
2212 edge_info->rhs = op1;
2217 /* ??? TRUTH_NOT_EXPR can create an equivalence too. */
2222 /* Propagate information from BB to its outgoing edges.
2224 This can include equivalency information implied by control statements
2225 at the end of BB and const/copy propagation into PHIs in BB's
2226 successor blocks. */
/* Dominator-walker callback (after-statement hook); WALK_DATA is unused.
   NONZERO_VARS here is a file-scope bitmap of SSA names known nonzero —
   presumably maintained by the rest of this pass; not visible in this
   chunk.  */
2228 static void
2229 propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
2230 basic_block bb)
2232 record_edge_info (bb);
2233 cprop_into_successor_phis (bb, nonzero_vars);
2236 /* Search for redundant computations in STMT. If any are found, then
2237 replace them with the variable holding the result of the computation.
2239 If safe, record this expression into the available expression hash
2240 table. */
/* Returns true if the replacement may have exposed new symbols that
   require the statement to be rescanned for SSA renaming (an ADDR_EXPR
   or a pointer-typed invariant was substituted).  */
2242 static bool
2243 eliminate_redundant_computations (tree stmt)
2245 tree *expr_p, def = NULL_TREE;
2246 bool insert = true;
2247 tree cached_lhs;
2248 bool retval = false;
2249 bool modify_expr_p = false;
2251 if (TREE_CODE (stmt) == MODIFY_EXPR)
2252 def = TREE_OPERAND (stmt, 0);
2254 /* Certain expressions on the RHS can be optimized away, but can not
2255 themselves be entered into the hash tables. */
2256 if (! def
2257 || TREE_CODE (def) != SSA_NAME
2258 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
2259 || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF)
2260 /* Do not record equivalences for increments of ivs. This would create
2261 overlapping live ranges for a very questionable gain. */
2262 || simple_iv_increment_p (stmt))
2263 insert = false;
2265 /* Check if the expression has been computed before. */
2266 cached_lhs = lookup_avail_expr (stmt, insert);
2268 /* If this is a COND_EXPR and we did not find its expression in
2269 the hash table, simplify the condition and try again. */
2270 if (! cached_lhs && TREE_CODE (stmt) == COND_EXPR)
2271 cached_lhs = simplify_cond_and_lookup_avail_expr (stmt);
2273 opt_stats.num_exprs_considered++;
2275 /* Get a pointer to the expression we are trying to optimize. */
/* MODIFY_EXPR_P distinguishes the cases where EXPR_P points at the RHS
   of an assignment (plain MODIFY_EXPR, or one embedded in a RETURN_EXPR)
   and a type conversion may therefore be needed before substituting.  */
2276 if (TREE_CODE (stmt) == COND_EXPR)
2277 expr_p = &COND_EXPR_COND (stmt);
2278 else if (TREE_CODE (stmt) == SWITCH_EXPR)
2279 expr_p = &SWITCH_COND (stmt);
2280 else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
2282 expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
2283 modify_expr_p = true;
2285 else
2287 expr_p = &TREE_OPERAND (stmt, 1);
2288 modify_expr_p = true;
2291 /* It is safe to ignore types here since we have already done
2292 type checking in the hashing and equality routines. In fact
2293 type checking here merely gets in the way of constant
2294 propagation. Also, make sure that it is safe to propagate
2295 CACHED_LHS into *EXPR_P. */
2296 if (cached_lhs
2297 && ((TREE_CODE (cached_lhs) != SSA_NAME
2298 && (modify_expr_p
2299 || tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
2300 TREE_TYPE (cached_lhs))))
2301 || may_propagate_copy (*expr_p, cached_lhs)))
2303 if (dump_file && (dump_flags & TDF_DETAILS))
2305 fprintf (dump_file, " Replaced redundant expr '");
2306 print_generic_expr (dump_file, *expr_p, dump_flags);
2307 fprintf (dump_file, "' with '");
2308 print_generic_expr (dump_file, cached_lhs, dump_flags);
2309 fprintf (dump_file, "'\n");
2312 opt_stats.num_re++;
2314 #if defined ENABLE_CHECKING
2315 gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
2316 || is_gimple_min_invariant (cached_lhs));
2317 #endif
/* Substituting an address or a pointer constant may expose symbols
   that need renaming; tell the caller via the return value.  */
2319 if (TREE_CODE (cached_lhs) == ADDR_EXPR
2320 || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
2321 && is_gimple_min_invariant (cached_lhs)))
2322 retval = true;
2324 if (modify_expr_p
2325 && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
2326 TREE_TYPE (cached_lhs)))
2327 cached_lhs = fold_convert (TREE_TYPE (*expr_p), cached_lhs);
2329 propagate_tree_value (expr_p, cached_lhs);
2330 mark_stmt_modified (stmt);
2332 return retval;
2335 /* STMT, a MODIFY_EXPR, may create certain equivalences, in either
2336 the available expressions table or the const_and_copies table.
2337 Detect and record those equivalences. */
/* MAY_OPTIMIZE_P is nonzero when the caller determined STMT is safe to
   optimize (no volatile operands, no side effects on the RHS); ANN is
   STMT's statement annotation.  */
2339 static void
2340 record_equivalences_from_stmt (tree stmt,
2341 int may_optimize_p,
2342 stmt_ann_t ann)
2344 tree lhs = TREE_OPERAND (stmt, 0);
2345 enum tree_code lhs_code = TREE_CODE (lhs);
2346 int i;
2348 if (lhs_code == SSA_NAME)
2350 tree rhs = TREE_OPERAND (stmt, 1);
2352 /* Strip away any useless type conversions. */
2353 STRIP_USELESS_TYPE_CONVERSION (rhs);
2355 /* If the RHS of the assignment is a constant or another variable that
2356 may be propagated, register it in the CONST_AND_COPIES table. We
2357 do not need to record unwind data for this, since this is a true
2358 assignment and not an equivalence inferred from a comparison. All
2359 uses of this ssa name are dominated by this assignment, so unwinding
2360 just costs time and space. */
2361 if (may_optimize_p
2362 && (TREE_CODE (rhs) == SSA_NAME
2363 || is_gimple_min_invariant (rhs)))
2364 SSA_NAME_VALUE (lhs) = rhs;
2366 if (tree_expr_nonzero_p (rhs))
2367 record_var_is_nonzero (lhs);
2370 /* Look at both sides for pointer dereferences. If we find one, then
2371 the pointer must be nonnull and we can enter that equivalence into
2372 the hash tables. */
/* Only valid when -fdelete-null-pointer-checks is in effect: it asserts
   that dereferencing a null pointer traps, so a executed dereference
   implies the pointer is nonzero.  */
2373 if (flag_delete_null_pointer_checks)
2374 for (i = 0; i < 2; i++)
2376 tree t = TREE_OPERAND (stmt, i);
2378 /* Strip away any COMPONENT_REFs. */
2379 while (TREE_CODE (t) == COMPONENT_REF)
2380 t = TREE_OPERAND (t, 0);
2382 /* Now see if this is a pointer dereference. */
2383 if (INDIRECT_REF_P (t))
2385 tree op = TREE_OPERAND (t, 0);
2387 /* If the pointer is a SSA variable, then enter new
2388 equivalences into the hash table. */
2389 while (TREE_CODE (op) == SSA_NAME)
2391 tree def = SSA_NAME_DEF_STMT (op);
2393 record_var_is_nonzero (op);
2395 /* And walk up the USE-DEF chains noting other SSA_NAMEs
2396 which are known to have a nonzero value. */
/* Only NOP_EXPR (conversion) defs are followed: a converted pointer is
   nonzero iff the original is.  Any other def terminates the walk.  */
2397 if (def
2398 && TREE_CODE (def) == MODIFY_EXPR
2399 && TREE_CODE (TREE_OPERAND (def, 1)) == NOP_EXPR)
2400 op = TREE_OPERAND (TREE_OPERAND (def, 1), 0);
2401 else
2402 break;
2407 /* A memory store, even an aliased store, creates a useful
2408 equivalence. By exchanging the LHS and RHS, creating suitable
2409 vops and recording the result in the available expression table,
2410 we may be able to expose more redundant loads. */
2411 if (!ann->has_volatile_ops
2412 && (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
2413 || is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
2414 && !is_gimple_reg (lhs))
2416 tree rhs = TREE_OPERAND (stmt, 1);
2417 tree new;
2419 /* FIXME: If the LHS of the assignment is a bitfield and the RHS
2420 is a constant, we need to adjust the constant to fit into the
2421 type of the LHS. If the LHS is a bitfield and the RHS is not
2422 a constant, then we can not record any equivalences for this
2423 statement since we would need to represent the widening or
2424 narrowing of RHS. This fixes gcc.c-torture/execute/921016-1.c
2425 and should not be necessary if GCC represented bitfields
2426 properly. */
2427 if (lhs_code == COMPONENT_REF
2428 && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
2430 if (TREE_CONSTANT (rhs))
2431 rhs = widen_bitfield (rhs, TREE_OPERAND (lhs, 1), lhs);
2432 else
2433 rhs = NULL;
2435 /* If the value overflowed, then we can not use this equivalence. */
2436 if (rhs && ! is_gimple_min_invariant (rhs))
2437 rhs = NULL;
2440 if (rhs)
2442 /* Build a new statement with the RHS and LHS exchanged. */
2443 new = build2 (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);
2445 create_ssa_artficial_load_stmt (new, stmt);
2447 /* Finally enter the statement into the available expression
2448 table. */
2449 lookup_avail_expr (new, true);
2454 /* Replace *OP_P in STMT with any known equivalent value for *OP_P from
2455 CONST_AND_COPIES. */
/* Returns true if the replacement may have exposed new symbols for SSA
   renaming (an ADDR_EXPR or pointer-typed constant was substituted).  */
2457 static bool
2458 cprop_operand (tree stmt, use_operand_p op_p)
2460 bool may_have_exposed_new_symbols = false;
2461 tree val;
2462 tree op = USE_FROM_PTR (op_p);
2464 /* If the operand has a known constant value or it is known to be a
2465 copy of some other variable, use the value or copy stored in
2466 CONST_AND_COPIES. */
/* VALUE_HANDLEs are placeholders used by other passes, not real
   replacement values; skip them.  */
2467 val = SSA_NAME_VALUE (op);
2468 if (val && val != op && TREE_CODE (val) != VALUE_HANDLE)
2470 tree op_type, val_type;
2472 /* Do not change the base variable in the virtual operand
2473 tables. That would make it impossible to reconstruct
2474 the renamed virtual operand if we later modify this
2475 statement. Also only allow the new value to be an SSA_NAME
2476 for propagation into virtual operands. */
2477 if (!is_gimple_reg (op)
2478 && (TREE_CODE (val) != SSA_NAME
2479 || is_gimple_reg (val)
2480 || get_virtual_var (val) != get_virtual_var (op)))
2481 return false;
2483 /* Do not replace hard register operands in asm statements. */
2484 if (TREE_CODE (stmt) == ASM_EXPR
2485 && !may_propagate_copy_into_asm (op))
2486 return false;
2488 /* Get the toplevel type of each operand. */
2489 op_type = TREE_TYPE (op);
2490 val_type = TREE_TYPE (val);
2492 /* While both types are pointers, get the type of the object
2493 pointed to. */
2494 while (POINTER_TYPE_P (op_type) && POINTER_TYPE_P (val_type))
2496 op_type = TREE_TYPE (op_type);
2497 val_type = TREE_TYPE (val_type);
2500 /* Make sure underlying types match before propagating a constant by
2501 converting the constant to the proper type. Note that convert may
2502 return a non-gimple expression, in which case we ignore this
2503 propagation opportunity. */
2504 if (TREE_CODE (val) != SSA_NAME)
2506 if (!lang_hooks.types_compatible_p (op_type, val_type))
2508 val = fold_convert (TREE_TYPE (op), val);
2509 if (!is_gimple_min_invariant (val))
2510 return false;
2514 /* Certain operands are not allowed to be copy propagated due
2515 to their interaction with exception handling and some GCC
2516 extensions. */
2517 else if (!may_propagate_copy (op, val))
2518 return false;
2520 /* Do not propagate copies if the propagated value is at a deeper loop
2521 depth than the propagatee. Otherwise, this may move loop variant
2522 variables outside of their loops and prevent coalescing
2523 opportunities. If the value was loop invariant, it will be hoisted
2524 by LICM and exposed for copy propagation. */
2525 if (loop_depth_of_name (val) > loop_depth_of_name (op))
2526 return false;
2528 /* Dump details. */
2529 if (dump_file && (dump_flags & TDF_DETAILS))
2531 fprintf (dump_file, " Replaced '");
2532 print_generic_expr (dump_file, op, dump_flags);
2533 fprintf (dump_file, "' with %s '",
2534 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2535 print_generic_expr (dump_file, val, dump_flags);
2536 fprintf (dump_file, "'\n");
2539 /* If VAL is an ADDR_EXPR or a constant of pointer type, note
2540 that we may have exposed a new symbol for SSA renaming. */
2541 if (TREE_CODE (val) == ADDR_EXPR
2542 || (POINTER_TYPE_P (TREE_TYPE (op))
2543 && is_gimple_min_invariant (val)))
2544 may_have_exposed_new_symbols = true;
2546 if (TREE_CODE (val) != SSA_NAME)
2547 opt_stats.num_const_prop++;
2548 else
2549 opt_stats.num_copy_prop++;
2551 propagate_value (op_p, val);
2553 /* And note that we modified this statement. This is now
2554 safe, even if we changed virtual operands since we will
2555 rescan the statement and rewrite its operands again. */
2556 mark_stmt_modified (stmt);
2558 return may_have_exposed_new_symbols;
2561 /* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2562 known value for that SSA_NAME (or NULL if no value is known).
2564 Propagate values from CONST_AND_COPIES into the uses, vuses and
2565 v_may_def_ops of STMT. */
/* Returns true if any substitution may have exposed new symbols that
   require rescanning STMT for SSA renaming (see cprop_operand).  */
2567 static bool
2568 cprop_into_stmt (tree stmt)
2570 bool may_have_exposed_new_symbols = false;
2571 use_operand_p op_p;
2572 ssa_op_iter iter;
2574 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
/* Non-SSA_NAME uses (e.g. already-propagated constants) are skipped.  */
2576 if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2577 may_have_exposed_new_symbols |= cprop_operand (stmt, op_p);
2580 return may_have_exposed_new_symbols;
2584 /* Optimize the statement pointed to by iterator SI.
2586 We try to perform some simplistic global redundancy elimination and
2587 constant propagation:
2589 1- To detect global redundancy, we keep track of expressions that have
2590 been computed in this block and its dominators. If we find that the
2591 same expression is computed more than once, we eliminate repeated
2592 computations by using the target of the first one.
2594 2- Constant values and copy assignments. This is used to do very
2595 simplistic constant and copy propagation. When a constant or copy
2596 assignment is found, we map the value on the RHS of the assignment to
2597 the variable in the LHS in the CONST_AND_COPIES table. */
/* Dominator-walker per-statement callback; WALK_DATA is unused, BB is
   the block containing SI.  */
2599 static void
2600 optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
2601 basic_block bb, block_stmt_iterator si)
2603 stmt_ann_t ann;
2604 tree stmt, old_stmt;
2605 bool may_optimize_p;
2606 bool may_have_exposed_new_symbols = false;
/* OLD_STMT is kept so EH cleanup below can compare against the
   possibly-folded replacement statement.  */
2608 old_stmt = stmt = bsi_stmt (si);
2610 if (TREE_CODE (stmt) == COND_EXPR)
2611 canonicalize_comparison (stmt);
2613 update_stmt_if_modified (stmt);
2614 ann = stmt_ann (stmt);
2615 opt_stats.num_stmts++;
2616 may_have_exposed_new_symbols = false;
2618 if (dump_file && (dump_flags & TDF_DETAILS))
2620 fprintf (dump_file, "Optimizing statement ");
2621 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2624 /* Const/copy propagate into USES, VUSES and the RHS of V_MAY_DEFs. */
2625 may_have_exposed_new_symbols = cprop_into_stmt (stmt);
2627 /* If the statement has been modified with constant replacements,
2628 fold its RHS before checking for redundant computations. */
2629 if (ann->modified)
2631 tree rhs;
2633 /* Try to fold the statement making sure that STMT is kept
2634 up to date. */
/* fold_stmt may replace the statement in place; refresh STMT and ANN
   from the iterator afterwards.  */
2635 if (fold_stmt (bsi_stmt_ptr (si)))
2637 stmt = bsi_stmt (si);
2638 ann = stmt_ann (stmt);
2640 if (dump_file && (dump_flags & TDF_DETAILS))
2642 fprintf (dump_file, " Folded to: ");
2643 print_generic_stmt (dump_file, stmt, TDF_SLIM);
2647 rhs = get_rhs (stmt);
2648 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2649 recompute_tree_invariant_for_addr_expr (rhs);
2651 /* Constant/copy propagation above may change the set of
2652 virtual operands associated with this statement. Folding
2653 may remove the need for some virtual operands.
2655 Indicate we will need to rescan and rewrite the statement. */
2656 may_have_exposed_new_symbols = true;
2659 /* Check for redundant computations. Do this optimization only
2660 for assignments that have no volatile ops and conditionals. */
2661 may_optimize_p = (!ann->has_volatile_ops
2662 && ((TREE_CODE (stmt) == RETURN_EXPR
2663 && TREE_OPERAND (stmt, 0)
2664 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
2665 && ! (TREE_SIDE_EFFECTS
2666 (TREE_OPERAND (TREE_OPERAND (stmt, 0), 1))))
2667 || (TREE_CODE (stmt) == MODIFY_EXPR
2668 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
2669 || TREE_CODE (stmt) == COND_EXPR
2670 || TREE_CODE (stmt) == SWITCH_EXPR));
2672 if (may_optimize_p)
2673 may_have_exposed_new_symbols |= eliminate_redundant_computations (stmt);
2675 /* Record any additional equivalences created by this statement. */
2676 if (TREE_CODE (stmt) == MODIFY_EXPR)
2677 record_equivalences_from_stmt (stmt,
2678 may_optimize_p,
2679 ann);
2681 /* If STMT is a COND_EXPR and it was modified, then we may know
2682 where it goes. If that is the case, then mark the CFG as altered.
2684 This will cause us to later call remove_unreachable_blocks and
2685 cleanup_tree_cfg when it is safe to do so. It is not safe to
2686 clean things up here since removal of edges and such can trigger
2687 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2688 the manager.
2690 That's all fine and good, except that once SSA_NAMEs are released
2691 to the manager, we must not call create_ssa_name until all references
2692 to released SSA_NAMEs have been eliminated.
2694 All references to the deleted SSA_NAMEs can not be eliminated until
2695 we remove unreachable blocks.
2697 We can not remove unreachable blocks until after we have completed
2698 any queued jump threading.
2700 We can not complete any queued jump threads until we have taken
2701 appropriate variables out of SSA form. Taking variables out of
2702 SSA form can call create_ssa_name and thus we lose.
2704 Ultimately I suspect we're going to need to change the interface
2705 into the SSA_NAME manager. */
2707 if (ann->modified)
2709 tree val = NULL;
2711 if (TREE_CODE (stmt) == COND_EXPR)
2712 val = COND_EXPR_COND (stmt);
2713 else if (TREE_CODE (stmt) == SWITCH_EXPR)
2714 val = SWITCH_COND (stmt);
/* A constant controlling expression means exactly one outgoing edge
   is taken; defer the CFG cleanup by just setting the flag.  */
2716 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
2717 cfg_altered = true;
2719 /* If we simplified a statement in such a way as to be shown that it
2720 cannot trap, update the eh information and the cfg to match. */
2721 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
2723 bitmap_set_bit (need_eh_cleanup, bb->index);
2724 if (dump_file && (dump_flags & TDF_DETAILS))
2725 fprintf (dump_file, " Flagged to clear EH edges.\n");
2729 if (may_have_exposed_new_symbols)
2730 VEC_safe_push (tree, heap, stmts_to_rescan, bsi_stmt (si));
2733 /* Search for an existing instance of STMT in the AVAIL_EXPRS table. If
2734 found, return its LHS. Otherwise insert STMT in the table and return
2735 NULL_TREE.
2737 Also, when an expression is first inserted in the AVAIL_EXPRS table, it
2738 is also added to the stack pointed to by BLOCK_AVAIL_EXPRS_P, so that they
2739 can be removed when we finish processing this block and its children.
2741 NOTE: This function assumes that STMT is a MODIFY_EXPR node that
2742 contains no CALL_EXPR on its RHS and makes no volatile nor
2743 aliased references. */
2745 static tree
2746 lookup_avail_expr (tree stmt, bool insert)
2748 void **slot;
2749 tree lhs;
2750 tree temp;
/* ELEMENT is heap-allocated here; it is freed on every path except the
   one where it becomes the new hash table entry (ownership transfers to
   AVAIL_EXPRS).  */
2751 struct expr_hash_elt *element = XNEW (struct expr_hash_elt);
2753 lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL;
2755 initialize_hash_element (stmt, lhs, element);
2757 /* Don't bother remembering constant assignments and copy operations.
2758 Constants and copy operations are handled by the constant/copy propagator
2759 in optimize_stmt. */
2760 if (TREE_CODE (element->rhs) == SSA_NAME
2761 || is_gimple_min_invariant (element->rhs))
2763 free (element);
2764 return NULL_TREE;
2767 /* If this is an equality test against zero, see if we have recorded a
2768 nonzero value for the variable in question. */
2769 if ((TREE_CODE (element->rhs) == EQ_EXPR
2770 || TREE_CODE (element->rhs) == NE_EXPR)
2771 && TREE_CODE (TREE_OPERAND (element->rhs, 0)) == SSA_NAME
2772 && integer_zerop (TREE_OPERAND (element->rhs, 1)))
2774 int indx = SSA_NAME_VERSION (TREE_OPERAND (element->rhs, 0));
2776 if (bitmap_bit_p (nonzero_vars, indx))
2778 tree t = element->rhs;
2779 free (element);
/* X == 0 folds to false and X != 0 to true when X is known nonzero.  */
2780 return constant_boolean_node (TREE_CODE (t) != EQ_EXPR,
2781 TREE_TYPE (t));
2785 /* Finally try to find the expression in the main expression hash table. */
2786 slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
2787 (insert ? INSERT : NO_INSERT));
/* SLOT is NULL only for a NO_INSERT lookup that found nothing.  */
2788 if (slot == NULL)
2790 free (element);
2791 return NULL_TREE;
2794 if (*slot == NULL)
2796 *slot = (void *) element;
/* Push onto the unwind stack so this entry can be popped when we leave
   the dominator subtree that made it available.  */
2797 VEC_safe_push (tree, heap, avail_exprs_stack,
2798 stmt ? stmt : element->rhs);
2799 return NULL_TREE;
2802 /* Extract the LHS of the assignment so that it can be used as the current
2803 definition of another variable. */
2804 lhs = ((struct expr_hash_elt *)*slot)->lhs;
2806 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2807 use the value from the const_and_copies table. */
2808 if (TREE_CODE (lhs) == SSA_NAME)
2810 temp = SSA_NAME_VALUE (lhs);
2811 if (temp && TREE_CODE (temp) != VALUE_HANDLE)
2812 lhs = temp;
2815 free (element);
2816 return lhs;
2819 /* Given a condition COND, record into HI_P, LO_P and INVERTED_P the
2820 range of values that result in the conditional having a true value.
2822 Return true if we are successful in extracting a range from COND and
2823 false if we are unsuccessful. */
2825 static bool
2826 extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p)
2828 tree op1 = TREE_OPERAND (cond, 1);
2829 tree high, low, type;
2830 int inverted;
2832 type = TREE_TYPE (op1);
2834 /* Experiments have shown that it's rarely, if ever useful to
2835 record ranges for enumerations. Presumably this is due to
2836 the fact that they're rarely used directly. They are typically
2837 cast into an integer type and used that way. */
2838 if (TREE_CODE (type) != INTEGER_TYPE)
2839 return 0;
2841 switch (TREE_CODE (cond))
2843 case EQ_EXPR:
2844 high = low = op1;
2845 inverted = 0;
2846 break;
/* NE is represented as the single-point range [op1, op1] with the
   INVERTED flag set, i.e. "everything outside the range".  */
2848 case NE_EXPR:
2849 high = low = op1;
2850 inverted = 1;
2851 break;
2853 case GE_EXPR:
2854 low = op1;
2856 /* Get the highest value of the type. If not a constant, use that
2857 of its base type, if it has one. */
2858 high = TYPE_MAX_VALUE (type);
2859 if (TREE_CODE (high) != INTEGER_CST && TREE_TYPE (type))
2860 high = TYPE_MAX_VALUE (TREE_TYPE (type));
2861 inverted = 0;
2862 break;
2864 case GT_EXPR:
2865 high = TYPE_MAX_VALUE (type);
2866 if (TREE_CODE (high) != INTEGER_CST && TREE_TYPE (type))
2867 high = TYPE_MAX_VALUE (TREE_TYPE (type));
/* x > MAX is never true; bail out rather than compute MAX+1, which
   would overflow.  */
2868 if (!tree_int_cst_lt (op1, high))
2869 return 0;
2870 low = int_const_binop (PLUS_EXPR, op1, integer_one_node, 1);
2871 inverted = 0;
2872 break;
2874 case LE_EXPR:
2875 high = op1;
2876 low = TYPE_MIN_VALUE (type);
2877 if (TREE_CODE (low) != INTEGER_CST && TREE_TYPE (type))
2878 low = TYPE_MIN_VALUE (TREE_TYPE (type));
2879 inverted = 0;
2880 break;
2882 case LT_EXPR:
2883 low = TYPE_MIN_VALUE (type);
2884 if (TREE_CODE (low) != INTEGER_CST && TREE_TYPE (type))
2885 low = TYPE_MIN_VALUE (TREE_TYPE (type));
/* x < MIN is never true; bail out rather than compute MIN-1.  */
2886 if (!tree_int_cst_lt (low, op1))
2887 return 0;
2888 high = int_const_binop (MINUS_EXPR, op1, integer_one_node, 1);
2889 inverted = 0;
2890 break;
2892 default:
2893 return 0;
2896 *hi_p = high;
2897 *lo_p = low;
2898 *inverted_p = inverted;
2899 return 1;
2902 /* Record a range created by COND for basic block BB. */
2904 static void
2905 record_range (tree cond, basic_block bb)
2907 enum tree_code code = TREE_CODE (cond);
2909 /* We explicitly ignore NE_EXPRs and all the unordered comparisons.
2910 They rarely allow for meaningful range optimizations and significantly
2911 complicate the implementation. */
2912 if ((code == LT_EXPR || code == LE_EXPR || code == GT_EXPR
2913 || code == GE_EXPR || code == EQ_EXPR)
2914 && TREE_CODE (TREE_TYPE (TREE_OPERAND (cond, 1))) == INTEGER_TYPE)
2916 struct vrp_hash_elt *vrp_hash_elt;
2917 struct vrp_element *element;
2918 VEC(vrp_element_p,heap) **vrp_records_p;
2919 void **slot;
/* Look up (or create) the per-variable record list in VRP_DATA, keyed
   by the SSA_NAME on the LHS of the comparison.  */
2922 vrp_hash_elt = XNEW (struct vrp_hash_elt);
2923 vrp_hash_elt->var = TREE_OPERAND (cond, 0);
2924 vrp_hash_elt->records = NULL;
2925 slot = htab_find_slot (vrp_data, vrp_hash_elt, INSERT);
/* If the table already had an entry for this variable, our freshly
   allocated element is redundant — free it and reuse the existing one.
   Otherwise ownership of VRP_HASH_ELT transfers to the table.  */
2927 if (*slot == NULL)
2928 *slot = (void *) vrp_hash_elt;
2929 else
2930 vrp_free (vrp_hash_elt);
2932 vrp_hash_elt = (struct vrp_hash_elt *) *slot;
2933 vrp_records_p = &vrp_hash_elt->records;
/* LOW/HIGH are left NULL here; presumably they are filled in lazily
   when the range is actually needed — confirm against the consumers of
   vrp_element, which are outside this chunk.  */
2935 element = GGC_NEW (struct vrp_element);
2936 element->low = NULL;
2937 element->high = NULL;
2938 element->cond = cond;
2939 element->bb = bb;
2941 VEC_safe_push (vrp_element_p, heap, *vrp_records_p, element);
/* Also record the variable on the unwind stack so the range can be
   retracted when we leave this dominator subtree.  */
2942 VEC_safe_push (tree, heap, vrp_variables_stack, TREE_OPERAND (cond, 0));
2946 /* Hashing and equality functions for VRP_DATA.
2948 Since this hash table is addressed by SSA_NAMEs, we can hash on
2949 their version number and equality can be determined with a
2950 pointer comparison. */
2952 static hashval_t
2953 vrp_hash (const void *p)
2955 tree var = ((struct vrp_hash_elt *)p)->var;
2957 return SSA_NAME_VERSION (var);
2960 static int
2961 vrp_eq (const void *p1, const void *p2)
2963 tree var1 = ((struct vrp_hash_elt *)p1)->var;
2964 tree var2 = ((struct vrp_hash_elt *)p2)->var;
2966 return var1 == var2;
2969 /* Hashing and equality functions for AVAIL_EXPRS. The table stores
2970 MODIFY_EXPR statements. We compute a value number for expressions using
2971 the code of the expression and the SSA numbers of its operands. */
2973 static hashval_t
2974 avail_expr_hash (const void *p)
2976 tree stmt = ((struct expr_hash_elt *)p)->stmt;
2977 tree rhs = ((struct expr_hash_elt *)p)->rhs;
2978 tree vuse;
2979 ssa_op_iter iter;
2980 hashval_t val = 0;
2982 /* iterative_hash_expr knows how to deal with any expression and
2983 deals with commutative operators as well, so just use it instead
2984 of duplicating such complexities here. */
2985 val = iterative_hash_expr (rhs, val);
2987 /* If the hash table entry is not associated with a statement, then we
2988 can just hash the expression and not worry about virtual operands
2989 and such. */
2990 if (!stmt || !stmt_ann (stmt))
2991 return val;
2993 /* Add the SSA version numbers of every vuse operand. This is important
2994 because compound variables like arrays are not renamed in the
2995 operands. Rather, the rename is done on the virtual variable
2996 representing all the elements of the array. */
/* Two loads of the same location thus hash equal only when they see the
   same virtual definitions, i.e. no intervening store.  */
2997 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
2998 val = iterative_hash_expr (vuse, val);
3000 return val;
3003 static hashval_t
3004 real_avail_expr_hash (const void *p)
3006 return ((const struct expr_hash_elt *)p)->hash;
/* Equality callback for AVAIL_EXPRS: two entries are equal when their
   RHS expressions are structurally identical (with compatible types)
   and their statements see the same virtual uses.  */
3009 static int
3010 avail_expr_eq (const void *p1, const void *p2)
3012 tree stmt1 = ((struct expr_hash_elt *)p1)->stmt;
3013 tree rhs1 = ((struct expr_hash_elt *)p1)->rhs;
3014 tree stmt2 = ((struct expr_hash_elt *)p2)->stmt;
3015 tree rhs2 = ((struct expr_hash_elt *)p2)->rhs;
3017 /* If they are the same physical expression, return true. */
3018 if (rhs1 == rhs2 && stmt1 == stmt2)
3019 return true;
3021 /* If their codes are not equal, then quit now. */
3022 if (TREE_CODE (rhs1) != TREE_CODE (rhs2))
3023 return false;
3025 /* In case of a collision, both RHS have to be identical and have the
3026 same VUSE operands. */
3027 if ((TREE_TYPE (rhs1) == TREE_TYPE (rhs2)
3028 || lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2)))
3029 && operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
3031 bool ret = compare_ssa_operands_equal (stmt1, stmt2, SSA_OP_VUSE);
/* Sanity check: equal elements must have equal cached hashes, or the
   hash table invariant is broken.  */
3032 gcc_assert (!ret || ((struct expr_hash_elt *)p1)->hash
3033 == ((struct expr_hash_elt *)p2)->hash);
3034 return ret;
3037 return false;