/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
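/* As an illustrative sketch of point 3 (labels such as <retlab> and <L1>
   are invented here, not actual dump output), a High GIMPLE body like

       if (a) return x; else { b = 1; return x; }

   is lowered roughly into

       if (a) goto <retlab>; else goto <L1>;
       <L1>:
       b = 1;
       goto <retlab>;
       <retlab>:
       return x;

   where <retlab> is an artificial label attached to the single
   representative return emitted at the end of the function.  */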
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
	  || gimple_return_retval (data.return_statements.last().stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = DECL_CHAIN (p))
	{
	  tree arg;

	  /* We cannot distinguish a varargs function from the case
	     of excess parameters, still deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (p == error_mark_node
	      || arg == error_mark_node
	      || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
	    return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  tree arg;

	  /* If this is a varargs function defer inlining decision
	     to callee.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (TREE_VALUE (p) == error_mark_node
	      || arg == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (TREE_VALUE (p), arg)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }

  return true;
}
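/* For instance (an illustrative sketch, not a normative example), checking
   the call 'printf ("%d", i)' against the declaration
   'int printf (const char *, ...)' only verifies the first argument:
   DECL_ARGUMENTS lists just the named parameter, so P runs out, the loop
   in gimple_check_call_args breaks, and the remaining arguments are
   accepted.  At this point we cannot tell a varargs call from one with
   excess arguments, which is why the decision is deferred to the
   callee.  */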
/* Verify if the type of the argument and lhs of CALL_STMT matches
   that of the function declaration CALLEE.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
				      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
/* Lower a bind_expr TSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
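/* An illustrative sketch (identifiers invented, not actual dump output):
   lowering the High GIMPLE

       D.1 = 0;
       {
	 int i;
	 i = D.1;
       }

   splices the bind body in place, leaving

       D.1 = 0;
       i = D.1;

   with 'i' recorded in the function's local declarations by record_vars
   and the lexical BLOCK kept only in the block tree rooted at
   DECL_INITIAL (current_function_decl).  */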
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence can not fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}
/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
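/* A small illustrative example of the fallthru predicates above (labels
   invented): a sequence ending in

       x = y + 1;

   may fall through, while one ending in 'return;' or 'goto <L5>;' may
   not, and a GIMPLE_TRY_FINALLY falls through only if both its try and
   finally sequences do.  */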
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

found:
  /* Generate a goto statement and remove the return statement.  */
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
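/* A back-of-the-envelope way to see why the shared dispatcher helps
   (purely illustrative): with N __builtin_setjmp calls and the resulting
   N receivers, wiring every abnormal call edge directly to every receiver
   would need on the order of N * N edges, whereas routing them through
   the single dispatcher needs only about 2 * N.  */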
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  bool change_cfun = fn != current_function_decl;

  if (change_cfun)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs contains also function/type/constant declarations
	 we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
    }

  if (change_cfun)
    pop_cfun ();
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}