/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
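
/* For illustration only (a sketch, not an excerpt from a real dump):
   a High GIMPLE body with two identical return statements such as

       if (x) return;
       foo ();
       return;

   is lowered so that all returns share a single return site:

       if (x) goto <L0>; else goto <L1>;
       <L1>:
       foo ();
       goto <L0>;
       <L0>:
       return;

   See lower_gimple_return below for the duplicate matching.  */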

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);

/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
          || gimple_return_retval (VEC_last (return_statements_t,
                                             data.return_statements).stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
         objects.  */
      t = VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
                    data.return_statements,
                    VEC_length (return_statements_t,
                                data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
         and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}

struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",			/* name */
  NULL,				/* gate */
  lower_function_body,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_NONE,			/* tv_id */
  PROP_gimple_any,		/* properties_required */
  PROP_gimple_lcf,		/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  0				/* todo_flags_finish */
 }
};

/* Verify that the types of the arguments match those of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify that the types of the arguments match those of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
           i < nargs;
           i++, p = DECL_CHAIN (p))
        {
          tree arg;
          /* We cannot distinguish a varargs function from the case
             of excess parameters, but deferring the inlining decision
             to the callee is still possible.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (p == error_mark_node
              || arg == error_mark_node
              || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
            return false;
        }
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
        {
          tree arg;
          /* If this is a varargs function, defer the inlining decision
             to the callee.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (TREE_VALUE (p) == error_mark_node
              || arg == error_mark_node
              || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
              || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (TREE_VALUE (p), arg)))
            return false;
        }
    }
  else
    {
      if (nargs != 0)
        return false;
    }
  return true;
}

/* Verify that the types of the arguments and the lhs of CALL_STMT match
   those of the function declaration CALLEE.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
                                      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}

/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}

/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
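
/* For example (illustrative only): in a sequence like

       goto <L2>;
       return;

   the return statement is unreachable; the goto sets cannot_fallthru,
   so the GIMPLE_RETURN case below removes the return instead of
   lowering it.  */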

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        lower_try_catch (gsi, data);
      else
        {
          /* It must be a GIMPLE_TRY_FINALLY.  */
          bool cannot_fallthru;
          lower_sequence (gimple_try_eval_ptr (stmt), data);
          cannot_fallthru = data->cannot_fallthru;

          /* The finally clause is always executed after the try clause,
             so if it does not fall through, then the try-finally will not
             fall through.  Otherwise, if the try clause does not fall
             through, then when the finally clause falls through it will
             resume execution wherever the try clause was going.  So the
             whole try-finally will only fall through if both the try
             clause and the finally clause fall through.  */
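          /* Equivalently: may_fallthru (try-finally)
               == may_fallthru (try) && may_fallthru (finally);
             the reset and the |= below compute its negation.  */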
          data->cannot_fallthru = false;
          lower_sequence (gimple_try_cleanup_ptr (stmt), data);
          data->cannot_fallthru |= cannot_fallthru;
          gsi_next (gsi);
        }
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);
        unsigned i;

        for (i = 0; i < gimple_call_num_args (stmt); i++)
          {
            tree arg = gimple_call_arg (stmt, i);
            if (EXPR_P (arg))
              TREE_SET_BLOCK (arg, data->block);
          }

        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
          {
            lower_builtin_setjmp (gsi);
            data->cannot_fallthru = false;
            data->calls_builtin_setjmp = true;
            return;
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          data->cannot_fallthru = false;
          lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
          if (!data->cannot_fallthru)
            cannot_fallthru = false;
        }
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
        cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
         catch expression and a body.  The whole TRY_CATCH may fall
         through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
            return true;
        }
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a RESX statement to resume execution after the exception.
         So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}

/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
         control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
         branch to a selected label and hence cannot fall through.
         Otherwise SWITCH_BODY is set, and the switch can fall
         through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
        return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
              && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
        stmt = TREE_OPERAND (stmt, 1);
      else
        return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}

/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
              || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}

/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}

/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  bool change_cfun = fn != current_function_decl;

  if (change_cfun)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
    }

  if (change_cfun)
    pop_cfun ();
}

/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}