/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
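
/* A purely illustrative sketch (not lifted from any real dump, and the
   textual dump syntax varies between GCC versions): in High GIMPLE a
   function body is still wrapped in a GIMPLE_BIND that holds its local
   declarations and may contain several identical returns,

     gimple_bind <
       int D.1234;
       ...
       return D.1234;
       ...
       return D.1234;
     >

   whereas after this pass the GIMPLE_BIND is gone (its variables have
   been recorded via record_vars), and all but one of the identical
   returns have been replaced by a goto to a shared label, with the
   single representative

     <retlab>:
       return D.1234;

   emitted at the end of the function by lower_function_body.  */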
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();

  return 0;
}
namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */
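
/* A hypothetical example of a mismatch (names invented): if one
   translation unit declares 'extern void f (double);' and calls
   'f (3.5)' while the actual definition is 'void f (char *p)', the
   REAL_TYPE argument is neither compatible with nor fold-convertible
   to 'char *', so the check below fails and callers of
   gimple_check_call_matching_types (e.g. the inliner) treat the call
   and the callee as having mismatched types.  */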
static bool
gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = DECL_CHAIN (p))
	{
	  tree arg;
	  /* We cannot distinguish a varargs function from the case
	     of excess parameters, still deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (p == error_mark_node
	      || arg == error_mark_node
	      || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
	    return false;
	}
      if (args_count_match && p)
	return false;
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  tree arg;
	  /* If this is a varargs function defer inlining decision
	     to callee.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (TREE_VALUE (p) == error_mark_node
	      || arg == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (TREE_VALUE (p), arg)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }
  return true;
}
/* Verify if the type of the argument and lhs of CALL_STMT matches
   that of the function declaration CALLEE.  If ARGS_COUNT_MATCH is
   true, the arg count needs to be the same.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee,
				  bool args_count_match)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
				      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee, args_count_match))
    return false;
  return true;
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
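
/* For instance (illustrative only), in a sequence like

     goto <D.1234>;
     return;

   the return can never be reached: cannot_fallthru is true when we get
   to it, so it is simply removed below instead of being lowered into
   yet another goto to the common return site.  */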
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */
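
/* Illustrative source-level examples (not from any test case): a block
   ending in 'return x;' or 'goto lab;' cannot fall through; one ending
   in 'if (p) return 1; else return 2;' cannot either, because neither
   arm falls through; a block ending in a plain call usually can, unless
   the callee is noreturn (ECF_NORETURN).  */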
bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence can not fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      return lang_hooks.block_may_fallthru (stmt);
    }
}
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
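
/* For reference, a hedged sketch of the kind of source code that leads
   to the shape above (names are illustrative; __builtin_setjmp takes a
   pointer to a buffer of five words):

     void *buf[5];
     ...
     if (__builtin_setjmp (buf) == 0)
       ...   -- direct path, the lowered DEST is 0
     else
       ...   -- reached via __builtin_longjmp (buf, 1), DEST is 1
*/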
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  bool change_cfun = fn != current_function_decl;

  if (change_cfun)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs contain also function/type/constant declarations
         we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
    }

  if (change_cfun)
    pop_cfun ();
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}