/* SSA Jump Threading
   Copyright (C) 2005, 2006 Free Software Foundation, Inc.
   Contributed by Jeff Law  <law@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"
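
/* Jump threading looks for a conditional at the end of a block whose
   outcome is already known when the block is entered via a particular
   edge.  For example, if block D ends with "if (x_1 > 0)" and the PHI
   feeding x_1 guarantees x_1 == 1 whenever D is entered from B, then
   the edge B->D can be threaded directly to the true arm, duplicating
   D's statements onto the new path.  The routines below decide when
   such a thread is safe and profitable, recording any temporary
   equivalences needed along the way.  */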

/* To avoid code explosion due to jump threading, we limit the
   number of statements we are going to copy.  This variable
   holds the number of statements currently seen that we'll have
   to copy as part of the jump threading process.  */
static int stmt_count;

/* Return TRUE if we may be able to thread an incoming edge into
   BB to an outgoing edge from BB.  Return FALSE otherwise.  */

bool
potentially_threadable_block (basic_block bb)
{
  block_stmt_iterator bsi;

  /* If BB has a single successor or a single predecessor, then
     there is no threading opportunity.  */
  if (single_succ_p (bb) || single_pred_p (bb))
    return false;
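
  /* Rationale: with a single predecessor there is only one incoming
     context, so any simplification it enables does not need edge
     duplication; with a single successor the block cannot end in a
     multi-way branch, so there is no outgoing edge to choose.  */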

  /* If BB does not end with a conditional, switch or computed goto,
     then there is no threading opportunity.  */
  bsi = bsi_last (bb);
  if (bsi_end_p (bsi)
      || ! bsi_stmt (bsi)
      || (TREE_CODE (bsi_stmt (bsi)) != COND_EXPR
          && TREE_CODE (bsi_stmt (bsi)) != GOTO_EXPR
          && TREE_CODE (bsi_stmt (bsi)) != SWITCH_EXPR))
    return false;

  return true;
}

/* Return the LHS of any ASSERT_EXPR where OP appears as the first
   argument to the ASSERT_EXPR and in which the ASSERT_EXPR dominates
   BB.  If no such ASSERT_EXPR is found, return OP.  */

static tree
lhs_of_dominating_assert (tree op, basic_block bb, tree stmt)
{
  imm_use_iterator imm_iter;
  tree use_stmt;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
    {
      use_stmt = USE_STMT (use_p);
      if (use_stmt != stmt
          && TREE_CODE (use_stmt) == MODIFY_EXPR
          && TREE_CODE (TREE_OPERAND (use_stmt, 1)) == ASSERT_EXPR
          && TREE_OPERAND (TREE_OPERAND (use_stmt, 1), 0) == op
          && dominated_by_p (CDI_DOMINATORS, bb, bb_for_stmt (use_stmt)))
        return TREE_OPERAND (use_stmt, 0);
    }

  return op;
}

/* We record temporary equivalences created by PHI nodes or
   statements within the target block.  Doing so allows us to
   identify more jump threading opportunities, even in blocks
   with side effects.

   We keep track of those temporary equivalences in a stack
   structure so that we can unwind them when we're done processing
   a particular edge.  This routine handles unwinding the data
   structures.  */

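/* As an illustration, the stack holds (previous value, SSA_NAME)
   pairs with the previous value pushed first; after recording
   equivalences for x_1 and then y_2 it contains

       prev-of-x_1, x_1, prev-of-y_2, y_2      (top at right)

   so unwinding pops y_2 first and restores its old value, then x_1.
   A NULL entry pushed in place of a name acts as a stop marker.  */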
static void
remove_temporary_equivalences (VEC(tree, heap) **stack)
{
  while (VEC_length (tree, *stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, *stack);

      /* A NULL value indicates we should stop unwinding, otherwise
         pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
        break;

      prev_value = VEC_pop (tree, *stack);
      SSA_NAME_VALUE (dest) = prev_value;
    }
}

/* Record a temporary equivalence, saving enough information so that
   we can restore the state of recorded equivalences when we're
   done processing the current edge.  */

static void
record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack)
{
  tree prev_x = SSA_NAME_VALUE (x);

  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }
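
  /* When Y already has a recorded value, X is bound above to that
     value directly, which keeps SSA_NAME_VALUE chains short.  */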

  SSA_NAME_VALUE (x) = y;
  VEC_reserve (tree, heap, *stack, 2);
  VEC_quick_push (tree, *stack, prev_x);
  VEC_quick_push (tree, *stack, x);
}

/* Record temporary equivalences created by PHIs at the target of the
   edge E.  Record unwind information for the equivalences onto STACK.

   If a PHI which prevents threading is encountered, then return FALSE
   indicating we should not thread this edge, else return TRUE.  */

static bool
record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
{
  tree phi;

  /* Each PHI creates a temporary equivalence, record them.
     These are context sensitive equivalences and will be removed
     later.  */
  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
    {
      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
      tree dst = PHI_RESULT (phi);

      /* If the desired argument is not the same as this PHI's result
         and it is set by a PHI in E->dest, then we cannot thread
         through E->dest.  */
      if (src != dst
          && TREE_CODE (src) == SSA_NAME
          && TREE_CODE (SSA_NAME_DEF_STMT (src)) == PHI_NODE
          && bb_for_stmt (SSA_NAME_DEF_STMT (src)) == e->dest)
        return false;

      /* We consider any non-virtual PHI as a statement since it
         could result in a constant assignment or copy operation.  */
      if (is_gimple_reg (dst))
        stmt_count++;

      record_temporary_equivalence (dst, src, stack);
    }

  return true;
}

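/* For example, given the PHI x_3 = PHI <7 (E), x_5 (E')>, entering
   the block via E records the context sensitive equivalence x_3 == 7,
   which may be all that is needed to decide the conditional ending
   the block.  (The names here are purely illustrative.)  */
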
/* Try to simplify each statement in E->dest, ultimately leading to
   a simplification of the COND_EXPR at the end of E->dest.

   Record unwind information for temporary equivalences onto STACK.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   statements using pass specific information.

   We might consider marking just those statements which ultimately
   feed the COND_EXPR.  It's not clear if the overhead of bookkeeping
   would be recovered by trying to simplify fewer statements.

   If we are able to simplify a statement into the form
   SSA_NAME = (SSA_NAME | gimple invariant), then we can record
   a context sensitive equivalency which may help us simplify
   later statements in E->dest.  */

static tree
record_temporary_equivalences_from_stmts_at_dest (edge e,
                                                  VEC(tree, heap) **stack,
                                                  tree (*simplify) (tree))
{
  block_stmt_iterator bsi;
  tree stmt = NULL;
  int max_stmt_count;

  max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);
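
  /* This limit is the user visible
     --param max-jump-thread-duplication-stmts, so the amount of
     duplication permitted can be tuned from the command line.  */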

  /* Walk through each statement in the block recording equivalences
     we discover.  Note any equivalences we discover are context
     sensitive (ie, are dependent on traversing E) and must be unwound
     when we're finished processing E.  */
  for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree cached_lhs = NULL;

      stmt = bsi_stmt (bsi);

      /* Ignore empty statements and labels.  */
      if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
        continue;

      /* If the statement has volatile operands, then we assume we
         cannot thread through this block.  This is overly
         conservative in some ways.  */
      if (TREE_CODE (stmt) == ASM_EXPR && ASM_VOLATILE_P (stmt))
        return NULL;

      /* If duplicating this block is going to cause too much code
         expansion, then do not thread through this block.  */
      stmt_count++;
      if (stmt_count > max_stmt_count)
        return NULL;

      /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new
         value, then do not try to simplify this statement as it will
         not simplify in any way that is helpful for jump threading.  */
      if (TREE_CODE (stmt) != MODIFY_EXPR
          || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
        continue;

      /* At this point we have a statement which assigns an RHS to an
         SSA_VAR on the LHS.  We want to try and simplify this statement
         to expose more context sensitive equivalences which in turn may
         allow us to simplify the condition at the end of the loop.

         Handle simple copy operations as well as implied copies from
         ASSERT_EXPRs.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
        cached_lhs = TREE_OPERAND (stmt, 1);
      else if (TREE_CODE (TREE_OPERAND (stmt, 1)) == ASSERT_EXPR)
        cached_lhs = TREE_OPERAND (TREE_OPERAND (stmt, 1), 0);
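      /* E.g. x_2 = ASSERT_EXPR <x_1, x_1 > 5> acts as the copy
         x_2 = x_1 for our purposes, so the ASSERT_EXPR's first
         operand becomes the cached value (illustrative names).  */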
      else
        {
          /* A statement that is not a trivial copy or ASSERT_EXPR.
             We're going to temporarily copy propagate the operands
             and see if that allows us to simplify this statement.  */
          tree *copy, pre_fold_expr;
          ssa_op_iter iter;
          use_operand_p use_p;
          unsigned int num, i = 0;

          num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
          copy = XCNEWVEC (tree, num);

          /* Make a copy of the uses & vuses into USES_COPY, then cprop into
             the operands.  */
          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
            {
              tree tmp = NULL;
              tree use = USE_FROM_PTR (use_p);

              copy[i++] = use;
              if (TREE_CODE (use) == SSA_NAME)
                tmp = SSA_NAME_VALUE (use);
              if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
                SET_USE (use_p, tmp);
            }

          /* Try to fold/lookup the new expression.  Inserting the
             expression into the hash table is unlikely to help.
             Sadly, we have to handle conditional assignments specially
             here, because fold expects all the operands of an expression
             to be folded before the expression itself is folded, but we
             can't just substitute the folded condition here.  */
          if (TREE_CODE (TREE_OPERAND (stmt, 1)) == COND_EXPR)
            {
              tree cond = COND_EXPR_COND (TREE_OPERAND (stmt, 1));
              cond = fold (cond);
              if (cond == boolean_true_node)
                pre_fold_expr = COND_EXPR_THEN (TREE_OPERAND (stmt, 1));
              else if (cond == boolean_false_node)
                pre_fold_expr = COND_EXPR_ELSE (TREE_OPERAND (stmt, 1));
              else
                pre_fold_expr = TREE_OPERAND (stmt, 1);
            }
          else
            pre_fold_expr = TREE_OPERAND (stmt, 1);

          if (pre_fold_expr)
            {
              cached_lhs = fold (pre_fold_expr);
              if (TREE_CODE (cached_lhs) != SSA_NAME
                  && !is_gimple_min_invariant (cached_lhs))
                cached_lhs = (*simplify) (stmt);
            }

          /* Restore the statement's original uses/defs.  */
          i = 0;
          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
            SET_USE (use_p, copy[i++]);

          free (copy);
        }

      /* Record the context sensitive equivalence if we were able
         to simplify this statement.  */
      if (cached_lhs
          && (TREE_CODE (cached_lhs) == SSA_NAME
              || is_gimple_min_invariant (cached_lhs)))
        record_temporary_equivalence (TREE_OPERAND (stmt, 0),
                                      cached_lhs,
                                      stack);
    }

  return stmt;
}

/* Simplify the control statement at the end of the block E->dest.

   To avoid allocating memory unnecessarily, a scratch COND_EXPR
   is available to use/clobber in DUMMY_COND.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   a condition using pass specific information.

   Return the simplified condition or NULL if simplification could
   not be performed.  */

static tree
simplify_control_stmt_condition (edge e,
                                 tree stmt,
                                 tree dummy_cond,
                                 tree (*simplify) (tree),
                                 bool handle_dominating_asserts)
{
  tree cond, cached_lhs;

  if (TREE_CODE (stmt) == COND_EXPR)
    cond = COND_EXPR_COND (stmt);
  else if (TREE_CODE (stmt) == GOTO_EXPR)
    cond = GOTO_DESTINATION (stmt);
  else
    cond = SWITCH_COND (stmt);

  /* For comparisons, we have to update both operands, then try
     to simplify the comparison.  */
  if (COMPARISON_CLASS_P (cond))
    {
      tree op0, op1;
      enum tree_code cond_code;

      op0 = TREE_OPERAND (cond, 0);
      op1 = TREE_OPERAND (cond, 1);
      cond_code = TREE_CODE (cond);

      /* Get the current value of both operands.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          tree tmp = SSA_NAME_VALUE (op0);
          if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
            op0 = tmp;
        }

      if (TREE_CODE (op1) == SSA_NAME)
        {
          tree tmp = SSA_NAME_VALUE (op1);
          if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
            op1 = tmp;
        }

      if (handle_dominating_asserts)
        {
          /* Now see if the operand was consumed by an ASSERT_EXPR
             which dominates E->src.  If so, we want to replace the
             operand with the LHS of the ASSERT_EXPR.  */
          if (TREE_CODE (op0) == SSA_NAME)
            op0 = lhs_of_dominating_assert (op0, e->src, stmt);

          if (TREE_CODE (op1) == SSA_NAME)
            op1 = lhs_of_dominating_assert (op1, e->src, stmt);
        }
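
      /* Replacing an operand with the ASSERT_EXPR's LHS matters
         because passes such as VRP attach their range information to
         that LHS.  E.g. if x_2 = ASSERT_EXPR <x_1, x_1 != 0> dominates
         E->src, testing x_2 instead of x_1 lets the SIMPLIFY callback
         exploit the recorded x_1 != 0 fact (illustrative names).  */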

      /* We may need to canonicalize the comparison.  For
         example, op0 might be a constant while op1 is an
         SSA_NAME.  Failure to canonicalize will cause us to
         miss threading opportunities.  */
      if (cond_code != SSA_NAME
          && tree_swap_operands_p (op0, op1, false))
        {
          tree tmp;
          cond_code = swap_tree_comparison (TREE_CODE (cond));
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
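
      /* E.g. 5 < x_3 becomes x_3 > 5, keeping any SSA_NAME on the
         left where subsequent lookups expect it.  */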

      /* Stuff the operator and operands into our dummy conditional
         expression.  */
      TREE_SET_CODE (COND_EXPR_COND (dummy_cond), cond_code);
      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op0;
      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1) = op1;

      /* We absolutely do not care about any type conversions;
         we only care about a zero/nonzero value.  */
      cached_lhs = fold (COND_EXPR_COND (dummy_cond));
      while (TREE_CODE (cached_lhs) == NOP_EXPR
             || TREE_CODE (cached_lhs) == CONVERT_EXPR
             || TREE_CODE (cached_lhs) == NON_LVALUE_EXPR)
        cached_lhs = TREE_OPERAND (cached_lhs, 0);

      /* If we have not simplified the condition down to an invariant,
         then use the pass specific callback to simplify the condition.  */
      if (! is_gimple_min_invariant (cached_lhs))
        cached_lhs = (*simplify) (dummy_cond);
    }

  /* We can have conditionals which just test the state of a variable
     rather than use a relational operator.  These are simpler to handle.  */
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      cached_lhs = cond;

      /* Get the variable's current value from the equivalency chains.

         It is possible to get loops in the SSA_NAME_VALUE chains
         (consider threading the backedge of a loop where we have
         a loop invariant SSA_NAME used in the condition).  */
      if (cached_lhs
          && TREE_CODE (cached_lhs) == SSA_NAME
          && SSA_NAME_VALUE (cached_lhs))
        cached_lhs = SSA_NAME_VALUE (cached_lhs);

      /* If we're dominated by a suitable ASSERT_EXPR, then
         update CACHED_LHS appropriately.  */
      if (handle_dominating_asserts && TREE_CODE (cached_lhs) == SSA_NAME)
        cached_lhs = lhs_of_dominating_assert (cached_lhs, e->src, stmt);

      /* If we haven't simplified to an invariant yet, then use the
         pass specific callback to try and simplify it further.  */
      if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
        cached_lhs = (*simplify) (stmt);
    }
  else
    cached_lhs = NULL;

  return cached_lhs;
}

/* We are exiting E->src, see if E->dest ends with a conditional
   jump which has a known value when reached via E.

   Special care is necessary if E is a back edge in the CFG as we
   may have already recorded equivalences for E->dest into our
   various tables, including the result of the conditional at
   the end of E->dest.  Threading opportunities are severely
   limited in that case to avoid short-circuiting the loop
   incorrectly.

   Note it is quite common for the first block inside a loop to
   end with a conditional which is either always true or always
   false when reached via the loop backedge.  Thus we do not want
   to blindly disable threading across a loop backedge.  */

void
thread_across_edge (tree dummy_cond,
                    edge e,
                    bool handle_dominating_asserts,
                    VEC(tree, heap) **stack,
                    tree (*simplify) (tree))
{
  tree stmt;

  /* If E is a backedge, then we want to verify that the COND_EXPR,
     SWITCH_EXPR or GOTO_EXPR at the end of e->dest is not affected
     by any statements in e->dest.  If it is affected, then it is not
     safe to thread this edge.  */
  if (e->flags & EDGE_DFS_BACK)
    {
      ssa_op_iter iter;
      use_operand_p use_p;
      tree last = bsi_stmt (bsi_last (e->dest));

      FOR_EACH_SSA_USE_OPERAND (use_p, last, iter, SSA_OP_USE | SSA_OP_VUSE)
        {
          tree use = USE_FROM_PTR (use_p);

          if (TREE_CODE (use) == SSA_NAME
              && TREE_CODE (SSA_NAME_DEF_STMT (use)) != PHI_NODE
              && bb_for_stmt (SSA_NAME_DEF_STMT (use)) == e->dest)
            goto fail;
        }
    }

  stmt_count = 0;

  /* PHIs create temporary equivalences.  */
  if (!record_temporary_equivalences_from_phis (e, stack))
    goto fail;

  /* Now walk each statement recording any context sensitive
     temporary equivalences we can detect.  */
  stmt = record_temporary_equivalences_from_stmts_at_dest (e, stack, simplify);
  if (!stmt)
    goto fail;

  /* If we stopped at a COND_EXPR or SWITCH_EXPR, see if we know which
     arm will be taken.  */
  if (TREE_CODE (stmt) == COND_EXPR
      || TREE_CODE (stmt) == GOTO_EXPR
      || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      tree cond;

      /* Extract and simplify the condition.  */
      cond = simplify_control_stmt_condition (e, stmt, dummy_cond, simplify,
                                              handle_dominating_asserts);

      if (cond && is_gimple_min_invariant (cond))
        {
          edge taken_edge = find_taken_edge (e->dest, cond);
          basic_block dest = (taken_edge ? taken_edge->dest : NULL);

          if (dest == e->dest)
            goto fail;

          remove_temporary_equivalences (stack);
          register_jump_thread (e, taken_edge);
          return;
        }
    }

 fail:
  remove_temporary_equivalences (stack);
}