/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (see tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return statement, with gotos to the unique return site.  */
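/* As an illustration of point 3 (a sketch, not taken from an actual GCC
   dump; the label name is made up): two identical returns

       ...
       return x;
       ...
       return x;

   are lowered into jumps to one representative return emitted at the
   end of the function:

       ...
       goto <retlab>;
       ...
       goto <retlab>;
       <retlab>:
       return x;  */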
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);
  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
          || gimple_return_retval (VEC_last (return_statements_t,
                                             data.return_statements)->stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }
  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
         objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
                    data.return_statements,
                    VEC_length (return_statements_t,
                                data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }
  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
         and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }
  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}
struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
           i < nargs;
           i++, p = DECL_CHAIN (p))
        {
          tree arg;
          /* We cannot distinguish a varargs function from the case
             of excess parameters, still deferring the inlining decision
             to the callee is possible.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (p == error_mark_node
              || arg == error_mark_node
              || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
            return false;
        }
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
        {
          tree arg;
          /* If this is a varargs function defer inlining decision
             to callee.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (TREE_VALUE (p) == error_mark_node
              || arg == error_mark_node
              || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
              || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (TREE_VALUE (p), arg)))
            return false;
        }
    }
  else
    {
      if (nargs != 0)
        return false;
    }

  return true;
}
/* Verify if the type of the argument and lhs of CALL_STMT matches
   that of the function declaration CALLEE.
   If we cannot verify this or there is a mismatch, return false.  */
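/* For example (an illustrative case, not taken from the original sources):
   if the caller only sees the mismatched declaration

     extern void foo (struct S);

   while CALLEE is actually defined as 'void foo (int)', an argument of
   type 'struct S' is neither compatible with nor foldable to 'int', so
   gimple_check_call_args fails and this predicate returns false, letting
   callers refuse to treat the call as matching CALLEE.  */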
bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
                                      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;

  return true;
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
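/* For instance (illustrative only), in a sequence such as

       foo ();      <-- foo is declared noreturn
       return;

   the call sets cannot_fallthru, so the unreachable GIMPLE_RETURN that
   follows is simply deleted rather than rewritten into a goto to the
   common return label.  */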
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;
    case GIMPLE_TRY:
      {
        bool try_cannot_fallthru;
        lower_sequence (gimple_try_eval_ptr (stmt), data);
        try_cannot_fallthru = data->cannot_fallthru;
        data->cannot_fallthru = false;
        lower_sequence (gimple_try_cleanup_ptr (stmt), data);
        /* See gimple_stmt_may_fallthru for the rationale.  */
        if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
          {
            data->cannot_fallthru |= try_cannot_fallthru;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler_ptr (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (stmt), data);
      break;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;
    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;
    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);

        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
          {
            lower_builtin_setjmp (gsi);
            data->cannot_fallthru = false;
            data->calls_builtin_setjmp = true;
            return;
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }
      }
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
/* Lower a GIMPLE_BIND GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }
  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */
static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
         catch expression and a body.  The whole TRY_CATCH may fall
         through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
            return true;
        }
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a RESX statement to resume execution after the exception.
         So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */
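/* For example (illustrative only): a block whose last statement is
   'return x;' or 'goto lab;' cannot fall through, and neither can one
   ending in a call to a noreturn function, while a block ending in a
   plain assignment conservatively can.  */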
bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE(block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
         control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
         branch to a selected label and hence can not fall through.
         Otherwise SWITCH_BODY is set, and the switch can fall
         through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
        return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
              && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
        stmt = TREE_OPERAND (stmt, 1);
      else
        return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */
bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
              || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}
/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }
  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

      [...]
      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]
      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
      if (gimple_referenced_vars (cfun))
        add_referenced_var (var);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}