/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
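
/* As an illustrative sketch of point 3 (hypothetical source, not GCC
   output), a function such as

     int f (int c)
     {
       if (c)
         return 1;
       return 1;
     }

   is lowered so that both identical returns become gotos to one label,
   and a single representative return statement is emitted at the end of
   the function by lower_function_body.  */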
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  /* The label.  */
  tree label;

  /* The return statement.  */
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
          || gimple_return_retval (data.return_statements.last().stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
         and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
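
/* For illustration (labels and values invented), assuming the body
   contained `return x;` in two places and a bare `return;` in another,
   the loop above leaves the function ending with something like:

     goto <L1>;    (was: return x;)
     ...
     goto <L2>;    (was: return;)
     ...
     <L1>:
     return x;
     <L2>:
     return;

   The exact label names are generated by create_artificial_label.  */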
struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",                              /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  lower_function_body,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  PROP_gimple_lcf,                      /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
           i < nargs;
           i++, p = DECL_CHAIN (p))
        {
          tree arg;
          /* We cannot distinguish a varargs function from the case
             of excess parameters, still deferring the inlining decision
             to the callee is possible.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (p == error_mark_node
              || arg == error_mark_node
              || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
            return false;
        }
      if (args_count_match && p)
        return false;
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
        {
          tree arg;
          /* If this is a varargs function defer inlining decision
             to callee.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (TREE_VALUE (p) == error_mark_node
              || arg == error_mark_node
              || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
              || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (TREE_VALUE (p), arg)))
            return false;
        }
    }
  else
    {
      if (nargs != 0)
        return false;
    }
  return true;
}
/* Verify if the type of the argument and lhs of CALL_STMT matches
   that of the function declaration CALLEE.  If ARGS_COUNT_MATCH is
   true, the arg count needs to be the same.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee,
                                  bool args_count_match)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
                                      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee, args_count_match))
    return false;
  return true;
}
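
/* An illustrative (made-up) mismatch these two checks catch: a callee
   declared as `int f (int *)` reached through a call whose actual
   argument is a `double`.  Neither types_compatible_p nor
   fold_convertible_p accepts that pair, so gimple_check_call_args
   returns false, and callers of gimple_check_call_matching_types (for
   instance, when deciding whether a call may be inlined) treat the
   call as having mismatched types.  */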
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        lower_try_catch (gsi, data);
      else
        {
          /* It must be a GIMPLE_TRY_FINALLY.  */
          bool cannot_fallthru;
          lower_sequence (gimple_try_eval_ptr (stmt), data);
          cannot_fallthru = data->cannot_fallthru;

          /* The finally clause is always executed after the try clause,
             so if it does not fall through, then the try-finally will not
             fall through.  Otherwise, if the try clause does not fall
             through, then when the finally clause falls through it will
             resume execution wherever the try clause was going.  So the
             whole try-finally will only fall through if both the try
             clause and the finally clause fall through.  */
          data->cannot_fallthru = false;
          lower_sequence (gimple_try_cleanup_ptr (stmt), data);
          data->cannot_fallthru |= cannot_fallthru;
          gsi_next (gsi);
        }
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);
        unsigned i;

        for (i = 0; i < gimple_call_num_args (stmt); i++)
          {
            tree arg = gimple_call_arg (stmt, i);
            if (EXPR_P (arg))
              TREE_SET_BLOCK (arg, data->block);
          }

        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
          {
            lower_builtin_setjmp (gsi);
            data->cannot_fallthru = false;
            data->calls_builtin_setjmp = true;
            return;
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
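
/* A small made-up illustration of the cannot_fallthru tracking above.
   Given the High GIMPLE sequence

     goto L1;
     return;
     L1:

   the GIMPLE_GOTO case sets cannot_fallthru, so the unreachable return
   that follows is removed by the GIMPLE_RETURN case instead of being
   lowered, and the GIMPLE_LABEL breaks to the common code at the
   bottom, which conservatively resets cannot_fallthru to false.  */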
/* Lower a bind_expr TSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          data->cannot_fallthru = false;
          lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
          if (!data->cannot_fallthru)
            cannot_fallthru = false;
        }
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
        cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
         catch expression and a body.  The whole TRY_CATCH may fall
         through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
            return true;
        }
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a RESX statement to resume execution after the exception.
         So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}
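
/* An illustrative (made-up) case for the two routines above: given

     try { throw_error (); }    where throw_error is noreturn,
     catch (A) { return; }
     catch (B) { count++; }

   the try block and the A handler cannot fall through, but the B
   handler can, so the whole try/catch is reported as possibly falling
   through.  */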
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE(block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
         control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
         branch to a selected label and hence can not fall through.
         Otherwise SWITCH_BODY is set, and the switch can fall
         through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
        return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
              && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
        stmt = TREE_OPERAND (stmt, 1);
      else
        return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      return lang_hooks.block_may_fallthru (stmt);
    }
}
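
/* Illustrative examples (hypothetical trees): a block ending in
   `return x;` yields false via the RETURN_EXPR case; a block ending in
   a MODIFY_EXPR whose right-hand side is a call to a noreturn function
   falls into the CALL_EXPR test and also yields false; a block ending
   in a plain assignment returns true from the MODIFY_EXPR case.  */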
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
              || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}
/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
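
/* For illustration (labels invented): when this routine sees the second
   of two identical `return;` statements, it finds the first one in
   data->return_statements and replaces the duplicate with

     goto <retlab>;

   while the shared

     <retlab>:
     return;

   pair is emitted only once, at the end of the function, by
   lower_function_body.  */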
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
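
/* An illustrative source-level trigger (user code, not part of GCC):

     void *buf[5];

     int f (void)
     {
       if (__builtin_setjmp (buf) == 0)
         return 0;
       return 1;    <- reached abnormally via __builtin_longjmp (buf, 1)
     }

   The call to __builtin_setjmp is rewritten by the routine above into
   the setup/receiver pair shown in the big comment, and the shared
   dispatcher is appended once by lower_function_body.  */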
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  bool change_cfun = fn != current_function_decl;

  if (change_cfun)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs contains also function/type/constant declarations
         we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
    }

  if (change_cfun)
    pop_cfun ();
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}