/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
27 #include "tree-iterator.h"
28 #include "tree-inline.h"
29 #include "tree-flow.h"
32 #include "diagnostic-core.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
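
/* As a concrete illustration of point 3 (simplified, hypothetical dump;
   the label names here are made up), a High GIMPLE body such as

     if (x_1 != 0) goto <L0>; else goto <L1>;
     <L0>: return;
     <L1>: return;

   is lowered so that the duplicate return becomes a jump to a shared
   label, with a single representative return emitted at the end of the
   function by lower_function_body:

     if (x_1 != 0) goto <L0>; else goto <L1>;
     <L0>: goto <retsite>;
     <L1>: goto <retsite>;
     <retsite>: return;  */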

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);

/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
          || (gimple_return_retval (data.return_statements.last().stmt)
              != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
         and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();

  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
           i < nargs;
           i++, p = DECL_CHAIN (p))
        {
          tree arg;
          /* We cannot distinguish a varargs function from the case
             of excess parameters, still deferring the inlining decision
             to the callee is possible.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (p == error_mark_node
              || arg == error_mark_node
              || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
            return false;
        }
      if (args_count_match && p)
        return false;
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
        {
          tree arg;
          /* If this is a varargs function defer inlining decision
             to callee.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (TREE_VALUE (p) == error_mark_node
              || arg == error_mark_node
              || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
              || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (TREE_VALUE (p), arg)))
            return false;
        }
    }
  else
    {
      if (nargs != 0)
        return false;
    }
  return true;
}

/* Verify if the type of the argument and lhs of CALL_STMT matches
   that of the function declaration CALLEE.  If ARGS_COUNT_MATCH is
   true, the arg count needs to be the same.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee,
                                  bool args_count_match)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
                                      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee, args_count_match))
    return false;
  return true;
}

/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
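    /* Note the empty increment: lower_stmt advances or removes the
       iterator itself, depending on the statement it handles.  */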
324 lower_stmt (&gsi
, data
);

/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        lower_try_catch (gsi, data);
      else
        {
          /* It must be a GIMPLE_TRY_FINALLY.  */
          bool cannot_fallthru;
          lower_sequence (gimple_try_eval_ptr (stmt), data);
          cannot_fallthru = data->cannot_fallthru;

          /* The finally clause is always executed after the try clause,
             so if it does not fall through, then the try-finally will not
             fall through.  Otherwise, if the try clause does not fall
             through, then when the finally clause falls through it will
             resume execution wherever the try clause was going.  So the
             whole try-finally will only fall through if both the try
             clause and the finally clause fall through.  */
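          /* Illustrative examples (not from the original sources):
             'try { return; } finally { }' cannot fall through, since
             after the finally clause control resumes at the return's
             target; 'try { } finally { abort (); }' cannot fall through
             because the finally clause cannot; the construct falls
             through only when both clauses can.  */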
          data->cannot_fallthru = false;
          lower_sequence (gimple_try_cleanup_ptr (stmt), data);
          data->cannot_fallthru |= cannot_fallthru;
          gsi_next (gsi);
        }
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);
        unsigned i;

        for (i = 0; i < gimple_call_num_args (stmt); i++)
          {
            tree arg = gimple_call_arg (stmt, i);
            if (EXPR_P (arg))
              TREE_SET_BLOCK (arg, data->block);
          }

        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
          {
            lower_builtin_setjmp (gsi);
            data->cannot_fallthru = false;
            data->calls_builtin_setjmp = true;
            return;
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          data->cannot_fallthru = false;
          lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
          if (!data->cannot_fallthru)
            cannot_fallthru = false;
        }
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
        cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
         catch expression and a body.  The whole TRY_CATCH may fall
         through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
            return true;
        }
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a RESX statement to resume execution after the exception.
         So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}

/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */
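/* For instance (illustrative): a block whose last statement is
   'return x;' or a call known to be noreturn yields false, a block
   ending in a plain assignment yields true, and an unrecognized tree
   code conservatively defers to the language hook below.  */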
bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
         control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
         branch to a selected label and hence can not fall through.
         Otherwise SWITCH_BODY is set, and the switch can fall
         through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
        return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
              && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
        stmt = TREE_OPERAND (stmt, 1);
      else
        return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      return lang_hooks.block_may_fallthru (stmt);
    }
}

/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
              || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}

/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}

/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  bool change_cfun = fn != current_function_decl;

  if (change_cfun)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
    }

  if (change_cfun)
    pop_cfun ();
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}