gcc/tree-ssa-threadedge.c
/* SSA Jump Threading
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
   Contributed by Jeff Law  <law@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "domwalk.h"
#include "real.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"
/* To avoid code explosion due to jump threading, we limit the
   number of statements we are going to copy.  This variable
   holds the number of statements currently seen that we'll have
   to copy as part of the jump threading process.  */
static int stmt_count;
/* Return TRUE if we may be able to thread an incoming edge into
   BB to an outgoing edge from BB.  Return FALSE otherwise.  */

bool
potentially_threadable_block (basic_block bb)
{
  block_stmt_iterator bsi;

  /* If BB has a single successor or a single predecessor, then
     there is no threading opportunity.  */
  if (single_succ_p (bb) || single_pred_p (bb))
    return false;

  /* If BB does not end with a conditional, switch or computed goto,
     then there is no threading opportunity.  */
  bsi = bsi_last (bb);
  if (bsi_end_p (bsi)
      || ! bsi_stmt (bsi)
      || (TREE_CODE (bsi_stmt (bsi)) != COND_EXPR
          && TREE_CODE (bsi_stmt (bsi)) != GOTO_EXPR
          && TREE_CODE (bsi_stmt (bsi)) != SWITCH_EXPR))
    return false;

  return true;
}
/* Return the LHS of any ASSERT_EXPR where OP appears as the first
   argument to the ASSERT_EXPR and in which the ASSERT_EXPR dominates
   BB.  If no such ASSERT_EXPR is found, return OP.  */

static tree
lhs_of_dominating_assert (tree op, basic_block bb, tree stmt)
{
  imm_use_iterator imm_iter;
  tree use_stmt;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
    {
      use_stmt = USE_STMT (use_p);
      if (use_stmt != stmt
          && TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
          && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == ASSERT_EXPR
          && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0) == op
          && dominated_by_p (CDI_DOMINATORS, bb, bb_for_stmt (use_stmt)))
        {
          return GIMPLE_STMT_OPERAND (use_stmt, 0);
        }
    }
  return op;
}
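/* For example, if a block dominating BB contains the assertion

     x_5 = ASSERT_EXPR <x_3, x_3 > 0>

   then a use of x_3 in the condition we are trying to simplify can be
   replaced by x_5, whose recorded range information may allow the
   condition to fold.  (Hypothetical SSA names, for illustration only.)  */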
/* We record temporary equivalences created by PHI nodes or
   statements within the target block.  Doing so allows us to
   identify more jump threading opportunities, even in blocks
   with side effects.

   We keep track of those temporary equivalences in a stack
   structure so that we can unwind them when we're done processing
   a particular edge.  This routine handles unwinding the data
   structures.  */

static void
remove_temporary_equivalences (VEC(tree, heap) **stack)
{
  while (VEC_length (tree, *stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, *stack);

      /* A NULL value indicates we should stop unwinding, otherwise
         pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
        break;

      prev_value = VEC_pop (tree, *stack);
      SSA_NAME_VALUE (dest) = prev_value;
    }
}
/* Record a temporary equivalence, saving enough information so that
   we can restore the state of recorded equivalences when we're
   done processing the current edge.  */

static void
record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack)
{
  tree prev_x = SSA_NAME_VALUE (x);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  SSA_NAME_VALUE (x) = y;
  VEC_reserve (tree, heap, *stack, 2);
  VEC_quick_push (tree, *stack, prev_x);
  VEC_quick_push (tree, *stack, x);
}
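/* For instance, recording the temporary equivalences x_1 = 5 and then
   y_2 = z_3 pushes two entries per equivalence, leaving the stack
   (bottom to top) as

     <old SSA_NAME_VALUE of x_1>, x_1, <old SSA_NAME_VALUE of y_2>, y_2

   so remove_temporary_equivalences can later pop the pairs off and
   restore the saved values.  (Hypothetical SSA names, for illustration.)  */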
/* Record temporary equivalences created by PHIs at the target of the
   edge E.  Record unwind information for the equivalences onto STACK.

   If a PHI which prevents threading is encountered, then return FALSE
   indicating we should not thread this edge, else return TRUE.  */

static bool
record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
{
  tree phi;

  /* Each PHI creates a temporary equivalence, record them.
     These are context sensitive equivalences and will be removed
     later.  */
  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
    {
      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
      tree dst = PHI_RESULT (phi);

      /* If the desired argument is not the same as this PHI's result
         and it is set by a PHI in E->dest, then we can not thread
         through E->dest.  */
      if (src != dst
          && TREE_CODE (src) == SSA_NAME
          && TREE_CODE (SSA_NAME_DEF_STMT (src)) == PHI_NODE
          && bb_for_stmt (SSA_NAME_DEF_STMT (src)) == e->dest)
        return false;

      /* We consider any non-virtual PHI as a statement since it
         could result in a constant assignment or copy operation.  */
      if (is_gimple_reg (dst))
        stmt_count++;

      record_temporary_equivalence (dst, src, stack);
    }
  return true;
}
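/* As an example, when threading edge E into a block containing

     x_3 = PHI <5 (E), y_7 (other edge)>

   the PHI is treated as the temporary, context sensitive equivalence
   x_3 = 5, which may later allow the block's ending conditional to be
   resolved.  (Hypothetical SSA names, for illustration.)  */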
/* Try to simplify each statement in E->dest, ultimately leading to
   a simplification of the COND_EXPR at the end of E->dest.

   Record unwind information for temporary equivalences onto STACK.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   statements using pass specific information.

   We might consider marking just those statements which ultimately
   feed the COND_EXPR.  It's not clear if the overhead of bookkeeping
   would be recovered by trying to simplify fewer statements.

   If we are able to simplify a statement into the form
   SSA_NAME = (SSA_NAME | gimple invariant), then we can record
   a context sensitive equivalency which may help us simplify
   later statements in E->dest.  */

static tree
record_temporary_equivalences_from_stmts_at_dest (edge e,
                                                  VEC(tree, heap) **stack,
                                                  tree (*simplify) (tree,
                                                                    tree))
{
  block_stmt_iterator bsi;
  tree stmt = NULL;
  int max_stmt_count;

  max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);

  /* Walk through each statement in the block recording equivalences
     we discover.  Note any equivalences we discover are context
     sensitive (ie, are dependent on traversing E) and must be unwound
     when we're finished processing E.  */
  for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree cached_lhs = NULL;
      tree rhs;

      stmt = bsi_stmt (bsi);

      /* Ignore empty statements and labels.  */
      if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
        continue;

      /* If the statement has volatile operands, then we assume we
         can not thread through this block.  This is overly
         conservative in some ways.  */
      if (TREE_CODE (stmt) == ASM_EXPR && ASM_VOLATILE_P (stmt))
        return NULL;

      /* If duplicating this block is going to cause too much code
         expansion, then do not thread through this block.  */
      stmt_count++;
      if (stmt_count > max_stmt_count)
        return NULL;

      /* If this is not a GIMPLE_MODIFY_STMT which sets an SSA_NAME to a new
         value, then do not try to simplify this statement as it will
         not simplify in any way that is helpful for jump threading.  */
      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
          || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
        continue;

      rhs = GIMPLE_STMT_OPERAND (stmt, 1);

      /* The result of __builtin_object_size depends on all the arguments
         of a phi node.  Temporarily using only one edge produces invalid
         results.  For example

           if (x < 6)
             goto l;
           else
             goto l;

           l:
           r = PHI <&w[2].a[1](2), &a.a[6](3)>
           __builtin_object_size (r, 0)

         The result of __builtin_object_size is defined to be the maximum of
         remaining bytes.  If we use only one edge on the phi, the result will
         change to be the remaining bytes for the corresponding phi argument.  */

      if (TREE_CODE (rhs) == CALL_EXPR)
        {
          tree fndecl = get_callee_fndecl (rhs);
          if (fndecl && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
            continue;
        }

      /* At this point we have a statement which assigns an RHS to an
         SSA_VAR on the LHS.  We want to try and simplify this statement
         to expose more context sensitive equivalences which in turn may
         allow us to simplify the condition at the end of the loop.

         Handle simple copy operations as well as implied copies from
         ASSERT_EXPRs.  */
      if (TREE_CODE (rhs) == SSA_NAME)
        cached_lhs = rhs;
      else if (TREE_CODE (rhs) == ASSERT_EXPR)
        cached_lhs = TREE_OPERAND (rhs, 0);
      else
        {
          /* A statement that is not a trivial copy or ASSERT_EXPR.
             We're going to temporarily copy propagate the operands
             and see if that allows us to simplify this statement.  */
          tree *copy, pre_fold_expr;
          ssa_op_iter iter;
          use_operand_p use_p;
          unsigned int num, i = 0;

          num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
          copy = XCNEWVEC (tree, num);

          /* Make a copy of the uses & vuses into COPY, then cprop into
             the operands.  */
          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
            {
              tree tmp = NULL;
              tree use = USE_FROM_PTR (use_p);

              copy[i++] = use;
              if (TREE_CODE (use) == SSA_NAME)
                tmp = SSA_NAME_VALUE (use);
              if (tmp)
                SET_USE (use_p, tmp);
            }

          /* Try to fold/lookup the new expression.  Inserting the
             expression into the hash table is unlikely to help.
             Sadly, we have to handle conditional assignments specially
             here, because fold expects all the operands of an expression
             to be folded before the expression itself is folded, but we
             can't just substitute the folded condition here.  */
          if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == COND_EXPR)
            {
              tree cond = COND_EXPR_COND (GIMPLE_STMT_OPERAND (stmt, 1));
              cond = fold (cond);
              if (cond == boolean_true_node)
                pre_fold_expr = COND_EXPR_THEN (GIMPLE_STMT_OPERAND (stmt, 1));
              else if (cond == boolean_false_node)
                pre_fold_expr = COND_EXPR_ELSE (GIMPLE_STMT_OPERAND (stmt, 1));
              else
                pre_fold_expr = GIMPLE_STMT_OPERAND (stmt, 1);
            }
          else
            pre_fold_expr = GIMPLE_STMT_OPERAND (stmt, 1);

          if (pre_fold_expr)
            {
              cached_lhs = fold (pre_fold_expr);
              if (TREE_CODE (cached_lhs) != SSA_NAME
                  && !is_gimple_min_invariant (cached_lhs))
                cached_lhs = (*simplify) (stmt, stmt);
            }

          /* Restore the statement's original uses/defs.  */
          i = 0;
          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
            SET_USE (use_p, copy[i++]);

          free (copy);
        }

      /* Record the context sensitive equivalence if we were able
         to simplify this statement.  */
      if (cached_lhs
          && (TREE_CODE (cached_lhs) == SSA_NAME
              || is_gimple_min_invariant (cached_lhs)))
        record_temporary_equivalence (GIMPLE_STMT_OPERAND (stmt, 0),
                                      cached_lhs,
                                      stack);
    }
  return stmt;
}
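/* For example, if E->dest contains a_5 = b_6 + 1 and the temporary
   equivalence b_6 = 4 was recorded from a PHI, copy propagating into
   the statement and folding yields the invariant 5, so a_5 = 5 is
   recorded as another context sensitive equivalence for use by later
   statements and the block-ending conditional.  (Hypothetical SSA
   names, for illustration.)  */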
/* Simplify the control statement at the end of the block E->dest.

   To avoid allocating memory unnecessarily, a scratch COND_EXPR
   is available to use/clobber in DUMMY_COND.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   a condition using pass specific information.

   Return the simplified condition or NULL if simplification could
   not be performed.  */

static tree
simplify_control_stmt_condition (edge e,
                                 tree stmt,
                                 tree dummy_cond,
                                 tree (*simplify) (tree, tree),
                                 bool handle_dominating_asserts)
{
  tree cond, cached_lhs;

  if (TREE_CODE (stmt) == COND_EXPR)
    cond = COND_EXPR_COND (stmt);
  else if (TREE_CODE (stmt) == GOTO_EXPR)
    cond = GOTO_DESTINATION (stmt);
  else
    cond = SWITCH_COND (stmt);

  /* For comparisons, we have to update both operands, then try
     to simplify the comparison.  */
  if (COMPARISON_CLASS_P (cond))
    {
      tree op0, op1;
      enum tree_code cond_code;

      op0 = TREE_OPERAND (cond, 0);
      op1 = TREE_OPERAND (cond, 1);
      cond_code = TREE_CODE (cond);

      /* Get the current value of both operands.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          tree tmp = SSA_NAME_VALUE (op0);
          if (tmp)
            op0 = tmp;
        }

      if (TREE_CODE (op1) == SSA_NAME)
        {
          tree tmp = SSA_NAME_VALUE (op1);
          if (tmp)
            op1 = tmp;
        }

      if (handle_dominating_asserts)
        {
          /* Now see if the operand was consumed by an ASSERT_EXPR
             which dominates E->src.  If so, we want to replace the
             operand with the LHS of the ASSERT_EXPR.  */
          if (TREE_CODE (op0) == SSA_NAME)
            op0 = lhs_of_dominating_assert (op0, e->src, stmt);

          if (TREE_CODE (op1) == SSA_NAME)
            op1 = lhs_of_dominating_assert (op1, e->src, stmt);
        }

      /* We may need to canonicalize the comparison.  For
         example, op0 might be a constant while op1 is an
         SSA_NAME.  Failure to canonicalize will cause us to
         miss threading opportunities.  */
      if (cond_code != SSA_NAME
          && tree_swap_operands_p (op0, op1, false))
        {
          tree tmp;
          cond_code = swap_tree_comparison (TREE_CODE (cond));
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }

      /* Stuff the operator and operands into our dummy conditional
         expression.  */
      TREE_SET_CODE (COND_EXPR_COND (dummy_cond), cond_code);
      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op0;
      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1) = op1;

      /* We absolutely do not care about any type conversions
         we only care about a zero/nonzero value.  */
      fold_defer_overflow_warnings ();

      cached_lhs = fold (COND_EXPR_COND (dummy_cond));
      while (CONVERT_EXPR_P (cached_lhs))
        cached_lhs = TREE_OPERAND (cached_lhs, 0);

      fold_undefer_overflow_warnings (is_gimple_min_invariant (cached_lhs),
                                      stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);

      /* If we have not simplified the condition down to an invariant,
         then use the pass specific callback to simplify the condition.  */
      if (! is_gimple_min_invariant (cached_lhs))
        cached_lhs = (*simplify) (dummy_cond, stmt);
    }

  /* We can have conditionals which just test the state of a variable
     rather than use a relational operator.  These are simpler to handle.  */
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      cached_lhs = cond;

      /* Get the variable's current value from the equivalency chains.

         It is possible to get loops in the SSA_NAME_VALUE chains
         (consider threading the backedge of a loop where we have
         a loop invariant SSA_NAME used in the condition).  */
      if (cached_lhs
          && TREE_CODE (cached_lhs) == SSA_NAME
          && SSA_NAME_VALUE (cached_lhs))
        cached_lhs = SSA_NAME_VALUE (cached_lhs);

      /* If we're dominated by a suitable ASSERT_EXPR, then
         update CACHED_LHS appropriately.  */
      if (handle_dominating_asserts && TREE_CODE (cached_lhs) == SSA_NAME)
        cached_lhs = lhs_of_dominating_assert (cached_lhs, e->src, stmt);

      /* If we haven't simplified to an invariant yet, then use the
         pass specific callback to try and simplify it further.  */
      if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
        cached_lhs = (*simplify) (stmt, stmt);
    }
  else
    cached_lhs = NULL;

  return cached_lhs;
}
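/* For example, given the block-ending conditional if (a_5 != 0) and a
   recorded temporary equivalence a_5 = 5, the condition folds to an
   invariant, so the caller knows which outgoing edge will be taken.
   (Hypothetical SSA names, for illustration.)  */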
/* We are exiting E->src, see if E->dest ends with a conditional
   jump which has a known value when reached via E.

   Special care is necessary if E is a back edge in the CFG as we
   may have already recorded equivalences for E->dest into our
   various tables, including the result of the conditional at
   the end of E->dest.  Threading opportunities are severely
   limited in that case to avoid short-circuiting the loop
   incorrectly.

   Note it is quite common for the first block inside a loop to
   end with a conditional which is either always true or always
   false when reached via the loop backedge.  Thus we do not want
   to blindly disable threading across a loop backedge.

   DUMMY_COND is a shared cond_expr used by condition simplification as scratch,
   to avoid allocating memory.

   HANDLE_DOMINATING_ASSERTS is true if we should try to replace operands of
   the simplified condition with left-hand sides of ASSERT_EXPRs they are
   used in.

   STACK is used to undo temporary equivalences created during the walk of
   E->dest.

   SIMPLIFY is a pass-specific function used to simplify statements.  */

void
thread_across_edge (tree dummy_cond,
                    edge e,
                    bool handle_dominating_asserts,
                    VEC(tree, heap) **stack,
                    tree (*simplify) (tree, tree))
{
  tree stmt;

  /* If E is a backedge, then we want to verify that the COND_EXPR,
     SWITCH_EXPR or GOTO_EXPR at the end of e->dest is not affected
     by any statements in e->dest.  If it is affected, then it is not
     safe to thread this edge.  */
  if (e->flags & EDGE_DFS_BACK)
    {
      ssa_op_iter iter;
      use_operand_p use_p;
      tree last = bsi_stmt (bsi_last (e->dest));

      FOR_EACH_SSA_USE_OPERAND (use_p, last, iter, SSA_OP_USE | SSA_OP_VUSE)
        {
          tree use = USE_FROM_PTR (use_p);

          if (TREE_CODE (use) == SSA_NAME
              && TREE_CODE (SSA_NAME_DEF_STMT (use)) != PHI_NODE
              && bb_for_stmt (SSA_NAME_DEF_STMT (use)) == e->dest)
            goto fail;
        }
    }

  stmt_count = 0;

  /* PHIs create temporary equivalences.  */
  if (!record_temporary_equivalences_from_phis (e, stack))
    goto fail;

  /* Now walk each statement recording any context sensitive
     temporary equivalences we can detect.  */
  stmt = record_temporary_equivalences_from_stmts_at_dest (e, stack, simplify);
  if (!stmt)
    goto fail;

  /* If we stopped at a COND_EXPR or SWITCH_EXPR, see if we know which arm
     will be taken.  */
  if (TREE_CODE (stmt) == COND_EXPR
      || TREE_CODE (stmt) == GOTO_EXPR
      || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      tree cond;

      /* Extract and simplify the condition.  */
      cond = simplify_control_stmt_condition (e, stmt, dummy_cond, simplify,
                                              handle_dominating_asserts);

      if (cond && is_gimple_min_invariant (cond))
        {
          edge taken_edge = find_taken_edge (e->dest, cond);
          basic_block dest = (taken_edge ? taken_edge->dest : NULL);

          if (dest == e->dest)
            goto fail;

          remove_temporary_equivalences (stack);
          register_jump_thread (e, taken_edge);
          return;
        }
    }

 fail:
  remove_temporary_equivalences (stack);
}