gcc/tree-ssa-threadedge.c
/* SSA Jump Threading
   Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Jeff Law <law@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "real.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"
/* To avoid code explosion due to jump threading, we limit the
   number of statements we are going to copy.  This variable
   holds the number of statements currently seen that we'll have
   to copy as part of the jump threading process.  */
static int stmt_count;

/* Array to record value-handles per SSA_NAME.  */
VEC(tree,heap) *ssa_name_values;
/* Set the value for the SSA name NAME to VALUE.  */

void
set_ssa_name_value (tree name, tree value)
{
  if (SSA_NAME_VERSION (name) >= VEC_length (tree, ssa_name_values))
    VEC_safe_grow_cleared (tree, heap, ssa_name_values,
                           SSA_NAME_VERSION (name) + 1);
  VEC_replace (tree, ssa_name_values, SSA_NAME_VERSION (name), value);
}
/* Initialize the per SSA_NAME value-handles array.  */

void
threadedge_initialize_values (void)
{
  gcc_assert (ssa_name_values == NULL);
  ssa_name_values = VEC_alloc (tree, heap, num_ssa_names);
}
/* Free the per SSA_NAME value-handle array.  */

void
threadedge_finalize_values (void)
{
  VEC_free (tree, heap, ssa_name_values);
}
/* Return TRUE if we may be able to thread an incoming edge into
   BB to an outgoing edge from BB.  Return FALSE otherwise.  */
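/* Roughly speaking, threading is only profitable when BB has multiple
   predecessors (so that an incoming edge can supply extra context) and
   multiple successors (so that there is a control transfer whose outcome
   that context might determine); the checks below encode exactly that.  */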
bool
potentially_threadable_block (basic_block bb)
{
  gimple_stmt_iterator gsi;

  /* If BB has a single successor or a single predecessor, then
     there is no threading opportunity.  */
  if (single_succ_p (bb) || single_pred_p (bb))
    return false;

  /* If BB does not end with a conditional, switch or computed goto,
     then there is no threading opportunity.  */
  gsi = gsi_last_bb (bb);
  if (gsi_end_p (gsi)
      || ! gsi_stmt (gsi)
      || (gimple_code (gsi_stmt (gsi)) != GIMPLE_COND
          && gimple_code (gsi_stmt (gsi)) != GIMPLE_GOTO
          && gimple_code (gsi_stmt (gsi)) != GIMPLE_SWITCH))
    return false;

  return true;
}
/* Return the LHS of any ASSERT_EXPR where OP appears as the first
   argument to the ASSERT_EXPR and in which the ASSERT_EXPR dominates
   BB.  If no such ASSERT_EXPR is found, return OP.  */
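/* For instance (a sketch; the SSA names are invented), given

     x_5 = ASSERT_EXPR <x_3, x_3 > 0>;

   a use of x_3 in a block dominated by that assignment can be replaced
   by x_5, whose defining statement carries the known predicate.  */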
static tree
lhs_of_dominating_assert (tree op, basic_block bb, gimple stmt)
{
  imm_use_iterator imm_iter;
  gimple use_stmt;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
    {
      use_stmt = USE_STMT (use_p);
      if (use_stmt != stmt
          && gimple_assign_single_p (use_stmt)
          && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ASSERT_EXPR
          && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == op
          && dominated_by_p (CDI_DOMINATORS, bb, gimple_bb (use_stmt)))
        {
          return gimple_assign_lhs (use_stmt);
        }
    }
  return op;
}
/* We record temporary equivalences created by PHI nodes or
   statements within the target block.  Doing so allows us to
   identify more jump threading opportunities, even in blocks
   with side effects.

   We keep track of those temporary equivalences in a stack
   structure so that we can unwind them when we're done processing
   a particular edge.  This routine handles unwinding the data
   structures.  */
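/* As a sketch of the stack layout (SSA names invented): after recording
   the equivalences x_1 = 0 and then y_2 = z_3, the stack holds, from
   bottom to top,

     [ prev value of x_1, x_1, prev value of y_2, y_2 ]

   Unwinding pops a <name, previous value> pair at a time and restores
   the name's previous value, stopping early if a NULL marker is
   popped.  */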
static void
remove_temporary_equivalences (VEC(tree, heap) **stack)
{
  while (VEC_length (tree, *stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, *stack);

      /* A NULL value indicates we should stop unwinding, otherwise
         pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
        break;

      prev_value = VEC_pop (tree, *stack);
      set_ssa_name_value (dest, prev_value);
    }
}
/* Record a temporary equivalence, saving enough information so that
   we can restore the state of recorded equivalences when we're
   done processing the current edge.  */

static void
record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack)
{
  tree prev_x = SSA_NAME_VALUE (x);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  set_ssa_name_value (x, y);
  VEC_reserve (tree, heap, *stack, 2);
  VEC_quick_push (tree, *stack, prev_x);
  VEC_quick_push (tree, *stack, x);
}
/* Record temporary equivalences created by PHIs at the target of the
   edge E.  Record unwind information for the equivalences onto STACK.

   If a PHI which prevents threading is encountered, then return FALSE
   indicating we should not thread this edge, else return TRUE.  */
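/* For example (a sketch; block numbers and SSA names are invented),
   when traversing an edge E coming from block 2 into a block whose
   first PHI is

     x_1 = PHI <0(2), x_4(3)>

   we temporarily record x_1 = 0, since that is the value x_1 has when
   the block is entered via E.  */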
static bool
record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
{
  gimple_stmt_iterator gsi;

  /* Each PHI creates a temporary equivalence, record them.
     These are context sensitive equivalences and will be removed
     later.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
      tree dst = gimple_phi_result (phi);

      /* If the desired argument is not the same as this PHI's result
         and it is set by a PHI in E->dest, then we cannot thread
         through E->dest.  */
      if (src != dst
          && TREE_CODE (src) == SSA_NAME
          && gimple_code (SSA_NAME_DEF_STMT (src)) == GIMPLE_PHI
          && gimple_bb (SSA_NAME_DEF_STMT (src)) == e->dest)
        return false;

      /* We consider any non-virtual PHI as a statement since it
         could result in a constant assignment or copy operation.  */
      if (is_gimple_reg (dst))
        stmt_count++;

      record_temporary_equivalence (dst, src, stack);
    }
  return true;
}
/* Fold the RHS of an assignment statement and return it as a tree.
   May return NULL_TREE if no simplification is possible.  */
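/* For instance (values invented), if temporary copy propagation has
   already replaced the operands so that a statement reads

     y_2 = 0 + 1;

   folding the binary RHS yields the invariant 1.  */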
static tree
fold_assignment_stmt (gimple stmt)
{
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CODE (rhs) == COND_EXPR)
          {
            /* Sadly, we have to handle conditional assignments specially
               here, because fold expects all the operands of an expression
               to be folded before the expression itself is folded, but we
               can't just substitute the folded condition here.  */
            tree cond = fold (COND_EXPR_COND (rhs));
            if (cond == boolean_true_node)
              rhs = COND_EXPR_THEN (rhs);
            else if (cond == boolean_false_node)
              rhs = COND_EXPR_ELSE (rhs);
          }

        return fold (rhs);
      }
      break;
    case GIMPLE_UNARY_RHS:
      {
        tree lhs = gimple_assign_lhs (stmt);
        tree op0 = gimple_assign_rhs1 (stmt);
        return fold_unary (subcode, TREE_TYPE (lhs), op0);
      }
      break;
    case GIMPLE_BINARY_RHS:
      {
        tree lhs = gimple_assign_lhs (stmt);
        tree op0 = gimple_assign_rhs1 (stmt);
        tree op1 = gimple_assign_rhs2 (stmt);
        return fold_binary (subcode, TREE_TYPE (lhs), op0, op1);
      }
      break;
    default:
      gcc_unreachable ();
    }
}
/* Try to simplify each statement in E->dest, ultimately leading to
   a simplification of the COND_EXPR at the end of E->dest.

   Record unwind information for temporary equivalences onto STACK.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   statements using pass specific information.

   We might consider marking just those statements which ultimately
   feed the COND_EXPR.  It's not clear if the overhead of bookkeeping
   would be recovered by trying to simplify fewer statements.

   If we are able to simplify a statement into the form
   SSA_NAME = (SSA_NAME | gimple invariant), then we can record
   a context sensitive equivalence which may help us simplify
   later statements in E->dest.  */
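/* As a sketch of the intended effect (names and values invented),
   suppose the PHIs told us x_1 = 0 along E.  Walking E->dest we might
   see

     y_2 = x_1 + 1;     <- copy propagates to 0 + 1, folds to 1
     if (y_2 > 0)       <- now known to be true along E

   so the conditional ending the block acquires a known value and the
   edge becomes threadable.  */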
static gimple
record_temporary_equivalences_from_stmts_at_dest (edge e,
                                                  VEC(tree, heap) **stack,
                                                  tree (*simplify) (gimple,
                                                                    gimple))
{
  gimple stmt = NULL;
  gimple_stmt_iterator gsi;
  int max_stmt_count;

  max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);

  /* Walk through each statement in the block recording equivalences
     we discover.  Note any equivalences we discover are context
     sensitive (ie, are dependent on traversing E) and must be unwound
     when we're finished processing E.  */
  for (gsi = gsi_start_bb (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree cached_lhs = NULL;

      stmt = gsi_stmt (gsi);

      /* Ignore empty statements and labels.  */
      if (gimple_code (stmt) == GIMPLE_NOP || gimple_code (stmt) == GIMPLE_LABEL)
        continue;

      /* If the statement has volatile operands, then we assume we
         can not thread through this block.  This is overly
         conservative in some ways.  */
      if (gimple_code (stmt) == GIMPLE_ASM && gimple_asm_volatile_p (stmt))
        return NULL;

      /* If duplicating this block is going to cause too much code
         expansion, then do not thread through this block.  */
      stmt_count++;
      if (stmt_count > max_stmt_count)
        return NULL;

      /* If this is not a statement that sets an SSA_NAME to a new
         value, then do not try to simplify this statement as it will
         not simplify in any way that is helpful for jump threading.  */
      if ((gimple_code (stmt) != GIMPLE_ASSIGN
           || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
          && (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) == NULL_TREE
              || TREE_CODE (gimple_call_lhs (stmt)) != SSA_NAME))
        continue;

      /* The result of __builtin_object_size depends on all the arguments
         of a phi node.  Temporarily using only one edge produces invalid
         results.  For example

         if (x < 6)
           goto l;
         else
           goto l;

         l:
         r = PHI <&w[2].a[1](2), &a.a[6](3)>
         __builtin_object_size (r, 0)

         The result of __builtin_object_size is defined to be the maximum of
         remaining bytes.  If we use only one edge on the phi, the result will
         change to be the remaining bytes for the corresponding phi argument.

         Similarly for __builtin_constant_p:

         r = PHI <1(2), 2(3)>
         __builtin_constant_p (r)

         Both PHI arguments are constant, but x ? 1 : 2 is still not
         constant.  */

      if (is_gimple_call (stmt))
        {
          tree fndecl = gimple_call_fndecl (stmt);
          if (fndecl
              && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE
                  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P))
            continue;
        }

      /* At this point we have a statement which assigns an RHS to an
         SSA_VAR on the LHS.  We want to try and simplify this statement
         to expose more context sensitive equivalences which in turn may
         allow us to simplify the condition at the end of the loop.

         Handle simple copy operations as well as implied copies from
         ASSERT_EXPRs.  */
      if (gimple_assign_single_p (stmt)
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
        cached_lhs = gimple_assign_rhs1 (stmt);
      else if (gimple_assign_single_p (stmt)
               && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
        cached_lhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      else
        {
          /* A statement that is not a trivial copy or ASSERT_EXPR.
             We're going to temporarily copy propagate the operands
             and see if that allows us to simplify this statement.  */
          tree *copy;
          ssa_op_iter iter;
          use_operand_p use_p;
          unsigned int num, i = 0;

          num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
          copy = XCNEWVEC (tree, num);

          /* Make a copy of the uses & vuses into USES_COPY, then cprop into
             the operands.  */
          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
            {
              tree tmp = NULL;
              tree use = USE_FROM_PTR (use_p);

              copy[i++] = use;
              if (TREE_CODE (use) == SSA_NAME)
                tmp = SSA_NAME_VALUE (use);
              if (tmp)
                SET_USE (use_p, tmp);
            }

          /* Try to fold/lookup the new expression.  Inserting the
             expression into the hash table is unlikely to help.  */
          if (is_gimple_call (stmt))
            cached_lhs = fold_call_stmt (stmt, false);
          else
            cached_lhs = fold_assignment_stmt (stmt);

          if (!cached_lhs
              || (TREE_CODE (cached_lhs) != SSA_NAME
                  && !is_gimple_min_invariant (cached_lhs)))
            cached_lhs = (*simplify) (stmt, stmt);

          /* Restore the statement's original uses/defs.  */
          i = 0;
          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
            SET_USE (use_p, copy[i++]);

          free (copy);
        }

      /* Record the context sensitive equivalence if we were able
         to simplify this statement.  */
      if (cached_lhs
          && (TREE_CODE (cached_lhs) == SSA_NAME
              || is_gimple_min_invariant (cached_lhs)))
        record_temporary_equivalence (gimple_get_lhs (stmt), cached_lhs, stack);
    }
  return stmt;
}
/* Simplify the control statement at the end of the block E->dest.

   To avoid allocating memory unnecessarily, a scratch GIMPLE_COND
   is available to use/clobber in DUMMY_COND.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   a condition using pass specific information.

   Return the simplified condition or NULL if simplification could
   not be performed.  */
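/* For example (a sketch; the SSA name and constant are invented), if
   E->dest ends in

     if (x_1 == 5)

   and the temporary equivalences map x_1 to the constant 5, the
   comparison folds to the invariant true and the caller knows which
   outgoing edge will be taken.  */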
static tree
simplify_control_stmt_condition (edge e,
                                 gimple stmt,
                                 gimple dummy_cond,
                                 tree (*simplify) (gimple, gimple),
                                 bool handle_dominating_asserts)
{
  tree cond, cached_lhs;
  enum gimple_code code = gimple_code (stmt);

  /* For comparisons, we have to update both operands, then try
     to simplify the comparison.  */
  if (code == GIMPLE_COND)
    {
      tree op0, op1;
      enum tree_code cond_code;

      op0 = gimple_cond_lhs (stmt);
      op1 = gimple_cond_rhs (stmt);
      cond_code = gimple_cond_code (stmt);

      /* Get the current value of both operands.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          tree tmp = SSA_NAME_VALUE (op0);
          if (tmp)
            op0 = tmp;
        }

      if (TREE_CODE (op1) == SSA_NAME)
        {
          tree tmp = SSA_NAME_VALUE (op1);
          if (tmp)
            op1 = tmp;
        }

      if (handle_dominating_asserts)
        {
          /* Now see if the operand was consumed by an ASSERT_EXPR
             which dominates E->src.  If so, we want to replace the
             operand with the LHS of the ASSERT_EXPR.  */
          if (TREE_CODE (op0) == SSA_NAME)
            op0 = lhs_of_dominating_assert (op0, e->src, stmt);

          if (TREE_CODE (op1) == SSA_NAME)
            op1 = lhs_of_dominating_assert (op1, e->src, stmt);
        }

      /* We may need to canonicalize the comparison.  For
         example, op0 might be a constant while op1 is an
         SSA_NAME.  Failure to canonicalize will cause us to
         miss threading opportunities.  */
      if (tree_swap_operands_p (op0, op1, false))
        {
          tree tmp;
          cond_code = swap_tree_comparison (cond_code);
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }

      /* Stuff the operator and operands into our dummy conditional
         expression.  */
      gimple_cond_set_code (dummy_cond, cond_code);
      gimple_cond_set_lhs (dummy_cond, op0);
      gimple_cond_set_rhs (dummy_cond, op1);

      /* We absolutely do not care about any type conversions; we only
         care about a zero/nonzero value.  */
      fold_defer_overflow_warnings ();

      cached_lhs = fold_binary (cond_code, boolean_type_node, op0, op1);
      if (cached_lhs)
        while (CONVERT_EXPR_P (cached_lhs))
          cached_lhs = TREE_OPERAND (cached_lhs, 0);

      fold_undefer_overflow_warnings ((cached_lhs
                                       && is_gimple_min_invariant (cached_lhs)),
                                      stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);

      /* If we have not simplified the condition down to an invariant,
         then use the pass specific callback to simplify the condition.  */
      if (!cached_lhs
          || !is_gimple_min_invariant (cached_lhs))
        cached_lhs = (*simplify) (dummy_cond, stmt);

      return cached_lhs;
    }

  if (code == GIMPLE_SWITCH)
    cond = gimple_switch_index (stmt);
  else if (code == GIMPLE_GOTO)
    cond = gimple_goto_dest (stmt);
  else
    gcc_unreachable ();

  /* We can have conditionals which just test the state of a variable
     rather than use a relational operator.  These are simpler to handle.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      cached_lhs = cond;

      /* Get the variable's current value from the equivalence chains.

         It is possible to get loops in the SSA_NAME_VALUE chains
         (consider threading the backedge of a loop where we have
         a loop invariant SSA_NAME used in the condition).  */
      if (cached_lhs
          && TREE_CODE (cached_lhs) == SSA_NAME
          && SSA_NAME_VALUE (cached_lhs))
        cached_lhs = SSA_NAME_VALUE (cached_lhs);

      /* If we're dominated by a suitable ASSERT_EXPR, then
         update CACHED_LHS appropriately.  */
      if (handle_dominating_asserts && TREE_CODE (cached_lhs) == SSA_NAME)
        cached_lhs = lhs_of_dominating_assert (cached_lhs, e->src, stmt);

      /* If we haven't simplified to an invariant yet, then use the
         pass specific callback to try and simplify it further.  */
      if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
        cached_lhs = (*simplify) (stmt, stmt);
    }
  else
    cached_lhs = NULL;

  return cached_lhs;
}
/* We are exiting E->src, see if E->dest ends with a conditional
   jump which has a known value when reached via E.

   Special care is necessary if E is a back edge in the CFG as we
   may have already recorded equivalences for E->dest into our
   various tables, including the result of the conditional at
   the end of E->dest.  Threading opportunities are severely
   limited in that case to avoid short-circuiting the loop
   incorrectly.

   Note it is quite common for the first block inside a loop to
   end with a conditional which is either always true or always
   false when reached via the loop backedge.  Thus we do not want
   to blindly disable threading across a loop backedge.

   DUMMY_COND is a shared cond_expr used by condition simplification as
   scratch, to avoid allocating memory.

   HANDLE_DOMINATING_ASSERTS is true if we should try to replace operands of
   the simplified condition with left-hand sides of ASSERT_EXPRs they are
   used in.

   STACK is used to undo temporary equivalences created during the walk of
   E->dest.

   SIMPLIFY is a pass-specific function used to simplify statements.  */
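/* As an illustration (a sketch; the block layout and names are
   invented), consider

     B1: if (a_1 > 10) goto B2; else goto B3;
     B2: ...
         if (a_1 > 10) goto B4; else goto B5;

   Entered via the edge from B1's true arm, B2's condition is known to
   be true, so that edge can be threaded straight to B4, bypassing the
   redundant test.  */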
void
thread_across_edge (gimple dummy_cond,
                    edge e,
                    bool handle_dominating_asserts,
                    VEC(tree, heap) **stack,
                    tree (*simplify) (gimple, gimple))
{
  gimple stmt;

  /* If E is a backedge, then we want to verify that the COND_EXPR,
     SWITCH_EXPR or GOTO_EXPR at the end of e->dest is not affected
     by any statements in e->dest.  If it is affected, then it is not
     safe to thread this edge.  */
  if (e->flags & EDGE_DFS_BACK)
    {
      ssa_op_iter iter;
      use_operand_p use_p;
      gimple last = gsi_stmt (gsi_last_bb (e->dest));

      FOR_EACH_SSA_USE_OPERAND (use_p, last, iter, SSA_OP_USE | SSA_OP_VUSE)
        {
          tree use = USE_FROM_PTR (use_p);

          if (TREE_CODE (use) == SSA_NAME
              && gimple_code (SSA_NAME_DEF_STMT (use)) != GIMPLE_PHI
              && gimple_bb (SSA_NAME_DEF_STMT (use)) == e->dest)
            goto fail;
        }
    }

  stmt_count = 0;

  /* PHIs create temporary equivalences.  */
  if (!record_temporary_equivalences_from_phis (e, stack))
    goto fail;

  /* Now walk each statement recording any context sensitive
     temporary equivalences we can detect.  */
  stmt = record_temporary_equivalences_from_stmts_at_dest (e, stack, simplify);
  if (!stmt)
    goto fail;

  /* If we stopped at a COND_EXPR or SWITCH_EXPR, see if we know which arm
     will be taken.  */
  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_GOTO
      || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      tree cond;

      /* Extract and simplify the condition.  */
      cond = simplify_control_stmt_condition (e, stmt, dummy_cond, simplify,
                                              handle_dominating_asserts);

      if (cond && is_gimple_min_invariant (cond))
        {
          edge taken_edge = find_taken_edge (e->dest, cond);
          basic_block dest = (taken_edge ? taken_edge->dest : NULL);

          if (dest == e->dest)
            goto fail;

          remove_temporary_equivalences (stack);
          register_jump_thread (e, taken_edge);
        }
    }

 fail:
  remove_temporary_equivalences (stack);
}