gcc/gimple-low.c
/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
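/* As an illustrative sketch of points 1 and 3 (simplified GIMPLE, not
   actual compiler output): a High GIMPLE body

     {
       int i;
       i = 0;
       return i;
     }

   loses its GIMPLE_BIND wrapper during lowering; "int i" is recorded
   via record_vars, the bind's statements are spliced in its place, and
   each return is redirected to a unique labeled return emitted at the
   end of the function by lower_function_body.  */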
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */
static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
	  || gimple_return_retval (data.return_statements.last().stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = DECL_CHAIN (p))
	{
	  tree arg;
	  /* We cannot distinguish a varargs function from the case
	     of excess parameters; still, deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (p == error_mark_node
	      || arg == error_mark_node
	      || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
	    return false;
	}
      if (args_count_match && p)
	return false;
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  tree arg;
	  /* If this is a varargs function, defer the inlining decision
	     to the callee.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (TREE_VALUE (p) == error_mark_node
	      || arg == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (TREE_VALUE (p), arg)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }
  return true;
}
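/* A hedged illustration (hypothetical C, not from the sources): for a
   declaration

     void f (int *);

   a call lowered from a mismatched declaration in another translation
   unit that passes a double fails the check above, since a double is
   neither compatible with nor foldable to "int *".  For a varargs
   declaration "void g (int, ...)", excess arguments exhaust the
   parameter chain, the loop breaks, and the call still matches;
   ARGS_COUNT_MATCH instead rejects calls that supply fewer arguments
   than there are parameters.  */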
/* Verify if the type of the argument and lhs of CALL_STMT matches
   that of the function declaration CALLEE.  If ARGS_COUNT_MATCH is
   true, the arg count needs to be the same.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee,
				  bool args_count_match)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
				      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee, args_count_match))
    return false;
  return true;
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
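/* An illustrative sketch (simplified GIMPLE, not actual compiler
   output): in the sequence

     if (x) goto <L0>; else goto <L1>;
     <L0>: return 1;
     <L1>: return 2;
     return 0;

   the final "return 0" directly follows another return, so
   cannot_fallthru is still true when it is reached and the
   GIMPLE_RETURN case below simply removes it.  A label in between
   would have reset cannot_fallthru to false and preserved it.  */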
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
/* Lower a bind_expr TSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence can not fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      return lang_hooks.block_may_fallthru (stmt);
    }
}
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */
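/* A sketch of the effect (hypothetical GIMPLE): the first "return a;"
   seen is recorded with a fresh artificial label <Lr> and replaced by
   "goto <Lr>;"; a later identical "return a;" reuses <Lr>, and the
   location of the representative return is wiped so that coverage
   analysis does not attribute every merged return to one line.  */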
static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  bool change_cfun = fn != current_function_decl;

  if (change_cfun)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
    }

  if (change_cfun)
    pop_cfun ();
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}