/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
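/* For illustration only -- the labels below are invented, not taken
   from a real dump -- a High GIMPLE body such as

	{
	  if (a) goto <D.1>; else goto <D.2>;
	  <D.1>:
	  return;
	  <D.2>:
	  foo ();
	  return;
	}

   is lowered into the flat sequence

	if (a) goto <D.1>; else goto <D.2>;
	<D.1>:
	goto <D.3>;
	<D.2>:
	foo ();
	goto <D.3>;
	<D.3>:
	return;

   where the enclosing GIMPLE_BIND is gone (point 1) and the two
   identical returns share a single return site (point 3).  */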
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
	  || gimple_return_retval (VEC_last (return_statements_t,
					     data.return_statements)->stmt)
	     != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
	 objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
		    data.return_statements,
		    VEC_length (return_statements_t,
				data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = implicit_built_in_decls[BUILT_IN_SETJMP_DISPATCHER];
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'goto DISP_VAR;' and insert.  */
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}
struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
/* Verify that the arguments of call statement STMT match the function
   declaration FNDECL.  If we cannot verify this or there is a mismatch,
   return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = DECL_CHAIN (p))
	{
	  /* We cannot distinguish a varargs function from the case of
	     excess parameters; still, deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  if (p == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || !fold_convertible_p (DECL_ARG_TYPE (p),
				      gimple_call_arg (stmt, i)))
	    return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  /* If this is a varargs function, defer the inlining decision
	     to the callee.  */
	  if (!p)
	    break;
	  if (TREE_VALUE (p) == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || !fold_convertible_p (TREE_VALUE (p),
				      gimple_call_arg (stmt, i)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }
  return true;
}
/* Verify that the arguments and the lhs of call statement CALL_STMT
   match the function declaration CALLEE.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
				      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}
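/* As a hypothetical example (invented source, not taken from any
   testcase), given

	int f (int);
	int (*p) (double) = (int (*) (double)) f;

   a call 'p (3.5)' that is later resolved to call f directly passes a
   double argument that fold_convertible_p will not convert to f's
   declared int parameter, so gimple_check_call_args, and hence
   gimple_check_call_matching_types, returns false; callers such as
   the inliner can then treat the call as type-mismatched instead of
   assuming the types line up.  */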
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      {
	bool try_cannot_fallthru;
	lower_sequence (gimple_try_eval (stmt), data);
	try_cannot_fallthru = data->cannot_fallthru;
	data->cannot_fallthru = false;
	lower_sequence (gimple_try_cleanup (stmt), data);
	/* See gimple_stmt_may_fallthru for the rationale.  */
	if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	  {
	    data->cannot_fallthru |= try_cannot_fallthru;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
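/* As an illustrative example of the unreachable-return cleanup above:
   in a sequence like

	goto <D.1>;
	return;

   cannot_fallthru is true once the goto has been processed, so the
   GIMPLE_RETURN case simply deletes the unreachable return instead of
   lowering it to a jump to a return site.  */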
/* Lower the GIMPLE_BIND statement pointed to by GSI.  DATA is passed
   through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence cannot fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}
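/* For instance (illustrative), a block ending in

	if (x) foo (); else abort ();

   may fall through: abort () is noreturn, but the foo () arm is
   enough for the COND_EXPR case to answer true.  A block ending in a
   bare call to abort () may not fall through.  */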
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}
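/* For instance (illustrative, in pseudo-source syntax),

	try { x = 1; } finally { cleanup (); }

   may fall through, whereas

	try { x = 1; } finally { abort (); }

   may not: the finally clause always runs, so if it cannot fall
   through, neither can the whole construct.  */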
/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_SETUP];
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_RECEIVER];
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
      if (gimple_referenced_vars (cfun))
	add_referenced_var (var);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}