/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-iterator.h"
29 #include "tree-inline.h"
30 #include "tree-flow.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "diagnostic-core.h"
34 #include "tree-pass.h"
35 #include "langhooks.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
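
/* For instance (an illustrative sketch, not part of the original
   sources), two identical return statements in High GIMPLE such as

     if (c) return;
     foo ();
     return;

   are unified during lowering into gotos to a single return site:

     if (c) goto <D.1>; else goto <D.2>;
     <D.2>:
     foo ();
     goto <D.1>;
     <D.1>:
     return;

   where <D.1> is an artificial label created by lower_gimple_return
   below.  */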
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);
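
/* A brief sketch (illustrative only) of the heap-allocated vector API
   that the two macros above generate, matching its uses in this file:

     VEC(return_statements_t,heap) *v
       = VEC_alloc (return_statements_t, heap, 8);      -- allocate
     return_statements_t rs;
     rs.label = label;
     rs.stmt = stmt;
     VEC_safe_push (return_statements_t, heap, v, &rs); -- append a copy
     rs = *VEC_last (return_statements_t, v);           -- read the top
     VEC_free (return_statements_t, heap, v);           -- release

   Since these are object vectors (DEF_VEC_O), elements are stored by
   value and VEC_safe_push takes a pointer to the element to copy.  */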
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
	  || gimple_return_retval (VEC_last (return_statements_t,
					     data.return_statements)->stmt)
	     != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
	 objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
		    data.return_statements,
		    VEC_length (return_statements_t,
				data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}
struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = DECL_CHAIN (p))
	{
	  tree arg;
	  /* We cannot distinguish a varargs function from the case of
	     excess parameters; still, deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (p == error_mark_node
	      || DECL_ARG_TYPE (p) == error_mark_node
	      || arg == error_mark_node
	      || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
	    return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  tree arg;
	  /* If this is a varargs function, defer the inlining decision
	     to the callee.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (TREE_VALUE (p) == error_mark_node
	      || arg == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (TREE_VALUE (p), arg)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }
  return true;
}
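
/* An illustrative example (not from the original sources): given

     struct S { int i; };
     void f (int);

   a GIMPLE call "f (s)" where 's' has type 'struct S' makes this
   function return false, since 'struct S' is neither compatible with
   nor foldable to 'int'.  Callers such as the inliner rely on this to
   skip calls whose arguments do not match the callee's declaration.  */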
/* Verify that the types of the LHS and the arguments of CALL_STMT
   match those of the function declaration CALLEE.  If we cannot
   verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
				      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}
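
/* Sketch (illustrative, not from the original sources): a directive
   such as

     #pragma omp parallel
       { x = 1; }

   arrives here as a GIMPLE_OMP_PARALLEL statement with a nested body.
   After lowering, the bare directive statement stands alone in the
   stream, immediately followed inline by its lowered body, and the
   directive's body operand has been cleared to NULL.  */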
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate; simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      {
	bool try_cannot_fallthru;
	lower_sequence (gimple_try_eval (stmt), data);
	try_cannot_fallthru = data->cannot_fallthru;
	data->cannot_fallthru = false;
	lower_sequence (gimple_try_cleanup (stmt), data);
	/* See gimple_stmt_may_fallthru for the rationale.  */
	if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	  {
	    data->cannot_fallthru |= try_cannot_fallthru;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body (stmt), data);
      lower_sequence (gimple_eh_else_e_body (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
/* Lower a GIMPLE_BIND statement at GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
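
/* Sketch (illustrative, not from the original sources): a High GIMPLE
   bind such as

     {
       int x;
       x = 1;
     }

   is replaced in the statement stream by its flattened body "x = 1;"
   once 'x' has been recorded via record_vars and the BLOCK the bind
   described has been chained into the block tree by the code above.  */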
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence can not fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      return lang_hooks.block_may_fallthru (stmt);
    }
}
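
/* Summarizing the TRY_FINALLY_EXPR case above (an illustrative
   restatement, not in the original sources): the construct falls
   through iff both operands do.

     try falls thru   finally falls thru   try-finally falls thru
     yes              yes                  yes
     yes              no                   no
     no               yes                  no (control resumes where
                                               the try was headed)
     no               no                   no  */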
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
      if (gimple_referenced_vars (cfun))
	add_referenced_var (var);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}