/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "varray.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "langhooks-def.h"
#include "tree-flow.h"
#include "timevar.h"
#include "except.h"
#include "hashtab.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "tree-pass.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (see tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
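
/* For illustration only (this example is not in the original sources):
   a High GIMPLE body such as

     gimple_bind <
       int x;

       x = a;
       return x;
     >

   is lowered to

     x = a;
     return x;

   with the GIMPLE_BIND gone and 'x' recorded in cfun->local_decls.
   Any additional 'return x;' elsewhere in the body would instead
   become a goto to the label of the unique return site (point 3
   above).  */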
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  /* The label to jump to for this return.  */
  tree label;

  /* The return statement itself.  */
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
          || gimple_return_retval (VEC_last (return_statements_t,
                                             data.return_statements)->stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
         object vectors.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
                    data.return_statements,
                    VEC_length (return_statements_t,
                                data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Remove the line number from the representative return statement.
         It now fills in for many such returns.  Failure to remove this
         will result in incorrect results for coverage analysis.  */
      gimple_set_location (t.stmt, UNKNOWN_LOCATION);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
         and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = implicit_built_in_decls[BUILT_IN_SETJMP_DISPATCHER];
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);

  return 0;
}
struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

bool
gimple_check_call_args (gimple stmt)
{
  tree fndecl, parms, p;
  unsigned int i, nargs;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  fndecl = gimple_call_fndecl (stmt);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (gimple_call_fn (stmt))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
           i < nargs;
           i++, p = TREE_CHAIN (p))
        {
          /* We cannot distinguish a varargs function from the case of
             excess parameters; still, deferring the inlining decision
             to the callee is possible.  */
          if (!p)
            break;
          if (p == error_mark_node
              || gimple_call_arg (stmt, i) == error_mark_node
              || !fold_convertible_p (DECL_ARG_TYPE (p),
                                      gimple_call_arg (stmt, i)))
            return false;
        }
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
        {
          /* If this is a varargs function, defer the inlining decision
             to the callee.  */
          if (!p)
            break;
          if (TREE_VALUE (p) == error_mark_node
              || gimple_call_arg (stmt, i) == error_mark_node
              || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
              || !fold_convertible_p (TREE_VALUE (p),
                                      gimple_call_arg (stmt, i)))
            return false;
        }
    }
  else
    {
      if (nargs != 0)
        return false;
    }
  return true;
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      {
        bool try_cannot_fallthru;

        lower_sequence (gimple_try_eval (stmt), data);
        try_cannot_fallthru = data->cannot_fallthru;
        data->cannot_fallthru = false;
        lower_sequence (gimple_try_cleanup (stmt), data);

        /* See gimple_stmt_may_fallthru for the rationale.  */
        if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
          {
            data->cannot_fallthru |= try_cannot_fallthru;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);

        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
          {
            lower_builtin_setjmp (gsi);
            data->cannot_fallthru = false;
            data->calls_builtin_setjmp = true;
            return;
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
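
/* For illustration only (this example is not in the original sources):
   when lowering

     goto <D.2000>;
     return;
     <D.2000>:;

   cannot_fallthru is true after the goto, so the unreachable 'return;'
   is deleted above rather than lowered; this keeps the EH lowering
   pass from adding useless edges for it later.  */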
/* Lower a GIMPLE_BIND GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
         catch expression and a body.  The whole TRY_CATCH may fall
         through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
            return true;
        }
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a RESX statement to resume execution after the exception.
         So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}
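
/* For illustration only (this example is not in the original sources):

     try { return; } catch (E) { x = 1; }   may fall through,
                                            via the catch body
     try { return; } catch (E) { return; }  cannot fall through

   which is what the GIMPLE_CATCH loop above computes.  */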
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
         control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
         branch to a selected label and hence can not fall through.
         Otherwise SWITCH_BODY is set, and the switch can fall
         through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
        return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
              && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
        stmt = TREE_OPERAND (stmt, 1);
      else
        return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}
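
/* For illustration only (this table is not in the original sources):
   the TRY_FINALLY_EXPR case above is a logical AND --

     try falls thru | finally falls thru | whole falls thru
          yes       |        yes         |       yes
          yes       |        no          |       no
          no        |        yes         |       no
          no        |        no          |       no

   e.g. 'try { return; } finally { x = 1; }' does not fall through:
   after the finally body, control resumes at the return.  */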
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}
/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        goto found;
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
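
/* For illustration only (this example is not in the original sources):
   in

     if (p) return x;
     ...
     return x;

   both returns have the same retval 'x', so they share one
   return_statements_t entry; each becomes 'goto <retlab>;' here, and
   lower_function_body emits the single representative

     <retlab>:
     return x;

   at the end of the function.  */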
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_SETUP];
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_zero_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_RECEIVER];
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
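
/* For illustration only (this example is not in the original sources):
   a source-level use such as

     if (__builtin_setjmp (buf) == 0)
       ...direct path...
     else
       ...longjmp landing path...

   reaches this function as 'D = __builtin_setjmp (&buf)' and expands
   into the setup/receiver pair shown in the comment above, with DEST
   set to 0 on the direct path and 1 at the receiver.  */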
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = TREE_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      cfun->local_decls = tree_cons (NULL_TREE, var,
                                     cfun->local_decls);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}