/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
28 #include "tree-iterator.h"
29 #include "tree-inline.h"
30 #include "tree-flow.h"
33 #include "diagnostic-core.h"
35 #include "tree-pass.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
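
/* As an illustrative sketch of point 3 (labels invented for exposition),
   a body containing two sites of 'return x;' is lowered so that each
   site becomes 'goto <retlab>;' and a single representative

     <retlab>:
     return x;

   is emitted at the end of the function by lower_function_body.  */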
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);
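
/* These two macros instantiate GCC's VEC API for heap-allocated vectors
   of return_statements_t objects; they provide the VEC_alloc, VEC_last,
   VEC_safe_push and VEC_truncate operations used below.  */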
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
	  || gimple_return_retval (VEC_last (return_statements_t,
					     data.return_statements)->stmt)
	     != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
	 objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
		    data.return_statements,
		    VEC_length (return_statements_t,
				data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Remove the line number from the representative return statement.
	 It now fills in for many such returns.  Failure to remove this
	 will result in incorrect results for coverage analysis.  */
      gimple_set_location (t.stmt, UNKNOWN_LOCATION);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = implicit_built_in_decls[BUILT_IN_SETJMP_DISPATCHER];
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}
struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

bool
gimple_check_call_args (gimple stmt)
{
  tree fndecl, parms, p;
  unsigned int i, nargs;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  fndecl = gimple_call_fndecl (stmt);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (gimple_call_fn (stmt))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = TREE_CHAIN (p))
	{
	  /* We cannot distinguish a varargs function from the case
	     of excess parameters, still deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  if (p == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || !fold_convertible_p (DECL_ARG_TYPE (p),
				      gimple_call_arg (stmt, i)))
	    return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  /* If this is a varargs function defer inlining decision
	     to callee.  */
	  if (!p)
	    break;
	  if (TREE_VALUE (p) == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || !fold_convertible_p (TREE_VALUE (p),
				      gimple_call_arg (stmt, i)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }
  return true;
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */
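
/* Note that the loop below has no increment expression: lower_stmt is
   expected to advance the iterator itself, either by stepping past the
   statement it has handled or by removing it (gsi_remove also advances
   the iterator).  */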
static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
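
/* For example (illustrative): in the sequence 'goto L; return;', the
   return can never be reached.  cannot_fallthru is true once the goto
   has been lowered, so the GIMPLE_RETURN case below simply deletes the
   dead return instead of lowering it.  */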
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      {
	bool try_cannot_fallthru;
	lower_sequence (gimple_try_eval (stmt), data);
	try_cannot_fallthru = data->cannot_fallthru;
	data->cannot_fallthru = false;
	lower_sequence (gimple_try_cleanup (stmt), data);
	/* See gimple_stmt_may_fallthru for the rationale.  */
	if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	  {
	    data->cannot_fallthru |= try_cannot_fallthru;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
/* Lower a GIMPLE_BIND statement at GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */
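
/* As an illustrative sketch: 'try { BODY } catch (A) { H1 } catch (B)
   { H2 }' may fall through iff BODY may fall through, or one of the
   handlers H1/H2 may fall through after catching its exception.  */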
static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */
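
/* For instance (illustrative), a block ending in a GOTO_EXPR or
   RETURN_EXPR cannot fall through, a block ending in a plain assignment
   can, and for a tree code not explicitly handled below we
   conservatively answer true.  */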
bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence can not fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}
/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN statement at GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	goto found;
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a call to __builtin_setjmp at GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_SETUP];
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_zero_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_RECEIVER];
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = TREE_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}