/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (see tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site; see the sketch
      below.  */
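
/* As an illustrative sketch of point 3 (the label names below are
   invented for the example, not produced by this pass verbatim),
   a body such as

     if (x) return;
     foo ();
     return;

   is lowered so that every return jumps to a unique return site:

     if (x) goto <L0>; else goto <L1>;
     <L1>:
     foo ();
     goto <L0>;
     <L0>:
     return;  */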
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
	  || gimple_return_retval (data.return_statements.last().stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
/* Verify that the types of the arguments to call statement STMT match
   those of the function declaration FNDECL.  If we cannot verify this
   or there is a mismatch, return false.  */
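
/* As an illustrative sketch (invented declarations, not actual GIMPLE):
   given "void f (int);", a call "f (1.0)" passes the check because a
   double can be folded to an int, whereas passing an aggregate argument
   makes the types incompatible and unconvertible, so the check fails.  */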
static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify that the types of the arguments match those of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = DECL_CHAIN (p))
	{
	  tree arg;
	  /* We cannot distinguish a varargs function from the case of
	     excess parameters; still, deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (p == error_mark_node
	      || arg == error_mark_node
	      || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
	    return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  tree arg;
	  /* If this is a varargs function, defer the inlining decision
	     to the callee.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (TREE_VALUE (p) == error_mark_node
	      || arg == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (TREE_VALUE (p), arg)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }
  return true;
}
/* Verify that the types of the arguments and the lhs of CALL_STMT match
   those of the function declaration CALLEE.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
				      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate: simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
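
/* For example (an illustrative sketch), in a sequence such as

     goto <D.1>;
     return;
     <D.1>:
     ...

   the "return" cannot be reached, so it is removed here rather than
   surviving long enough to grow spurious EH edges.  */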
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
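	  /* Summarized as a truth table (an illustrative sketch), the
	     result is the logical AND of the two clauses:

	       try falls thru   finally falls thru   try-finally falls thru
	       yes              yes                  yes
	       yes              no                   no
	       no               yes                  no
	       no               no                   no  */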
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
/* Lower a bind_expr GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;
	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */
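
/* For example (an illustrative sketch): a block whose last statement is
   "if (c) return 1; else return 0;" cannot fall through, since both arms
   of the COND_EXPR end in a RETURN_EXPR, whereas a block ending in a
   plain assignment "x = y;" may fall through.  */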
bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence cannot fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      return lang_hooks.block_may_fallthru (stmt);
    }
}
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}
/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  bool change_cfun = fn != current_function_decl;

  if (change_cfun)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
    }

  if (change_cfun)
    pop_cfun ();
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}