/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "langhooks-def.h"
#include "tree-flow.h"
#include "timevar.h"
#include "except.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "tree-pass.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return at the end of the function, and gotos are emitted to
      that unique return site.  */
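
/* For illustration only (schematic; this is not a verbatim GIMPLE dump
   and the label numbers are made up): a function such as

	void f (void)
	{
	  int i;
	  if (p)
	    return;
	  return;
	}

   is lowered so that the GIMPLE_BIND wrapping the body disappears
   ('i' is recorded via record_vars instead), and the two identical
   returns become gotos to a single return emitted at the end:

	if (p) goto <D.1>; else goto <D.2>;
	<D.1>:
	goto <D.3>;
	<D.2>:
	goto <D.3>;
	<D.3>:
	return;  */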

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  /* The label to jump to, emitted just before the canonical return.  */
  tree label;

  /* The canonical return statement associated with LABEL.  */
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);

/* State shared by the lowering routines.  */
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);

/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
	  || gimple_return_retval (VEC_last (return_statements_t,
					     data.return_statements)->stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
	 objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
		    data.return_statements,
		    VEC_length (return_statements_t,
				data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Remove the line number from the representative return statement.
	 It now fills in for many such returns.  Failure to remove this
	 will result in incorrect results for coverage analysis.  */
      gimple_set_location (t.stmt, UNKNOWN_LOCATION);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = implicit_built_in_decls[BUILT_IN_SETJMP_DISPATCHER];
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);

  return 0;
}

struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};

/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

bool
gimple_check_call_args (gimple stmt)
{
  tree fndecl, parms, p;
  unsigned int i, nargs;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  fndecl = gimple_call_fndecl (stmt);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (gimple_call_fn (stmt))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = TREE_CHAIN (p))
	{
	  /* We cannot distinguish a varargs function from the case
	     of excess parameters, still deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  if (p == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || !fold_convertible_p (DECL_ARG_TYPE (p),
				      gimple_call_arg (stmt, i)))
	    return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  /* If this is a varargs function defer the inlining decision
	     to the callee.  */
	  if (!p)
	    break;
	  if (TREE_VALUE (p) == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || !fold_convertible_p (TREE_VALUE (p),
				      gimple_call_arg (stmt, i)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }

  return true;
}

/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}

/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}

/* Lower statement GSI.  DATA is passed through the recursion.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      return;

    case GIMPLE_COND:
      /* The gimplifier has already lowered this into gotos.  */
      break;

    case GIMPLE_RETURN:
      lower_gimple_return (gsi, data);
      return;

    case GIMPLE_TRY:
      lower_sequence (gimple_try_eval (stmt), data);
      lower_sequence (gimple_try_cleanup (stmt), data);
      break;

    case GIMPLE_CATCH:
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_GOTO:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_SWITCH:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    data->calls_builtin_setjmp = true;
	    lower_builtin_setjmp (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      lower_omp_directive (gsi, data);
      return;

    default:
      gcc_unreachable ();
    }

  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}

/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
    case RESX_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence can not fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}
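
/* For example (illustrative only): a block whose last statement is
   'return x;' or 'goto lab;' cannot fall through, a block ending in a
   call to a noreturn function such as abort cannot fall through, and a
   block ending in a plain assignment may fall through.  */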

/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a
	 branch to a selected label and hence can not fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_ASSIGN:
      return true;

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}

/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}

/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	goto found;
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_SETUP];
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_zero_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_RECEIVER];
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = TREE_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      cfun->local_decls = tree_cons (NULL_TREE, var,
				     cfun->local_decls);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}