/* Extraction provenance (web-viewer residue, kept for reference):
   commit subject: "Enable dumping of alias graphs."
   repository path: official-gcc/Ramakrishna.git / gcc / gimple-low.c
   blob: eba86727a0595031c6bd2fd1415d68edc50c5246  */
1 /* GIMPLE lowering pass. Converts High GIMPLE into Low GIMPLE.
3 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "varray.h"
29 #include "gimple.h"
30 #include "tree-iterator.h"
31 #include "tree-inline.h"
32 #include "diagnostic.h"
33 #include "langhooks.h"
34 #include "langhooks-def.h"
35 #include "tree-flow.h"
36 #include "timevar.h"
37 #include "except.h"
38 #include "hashtab.h"
39 #include "flags.h"
40 #include "function.h"
41 #include "expr.h"
42 #include "toplev.h"
43 #include "tree-pass.h"
45 /* The differences between High GIMPLE and Low GIMPLE are the
46 following:
48 1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).
50 2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
51 flow and exception regions are built as an on-the-side region
52 hierarchy (See tree-eh.c:lower_eh_constructs).
54 3- Multiple identical return statements are grouped into a single
55 return and gotos to the unique return site. */
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  /* The artificial label emitted at the end of the function that
     duplicate returns jump to.  */
  tree label;

  /* The representative GIMPLE_RETURN statement for this label.  */
  gimple stmt;
};

typedef struct return_statements_t return_statements_t;

/* Declare VEC operations for vectors of return_statements_t objects
   allocated on the heap.  */
DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);
/* State passed through the recursive lowering routines.  */
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};
/* Forward declarations for the mutually recursive lowering routines.  */
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  Returns 0 (no additional TODO flags).  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
	  || gimple_return_retval (VEC_last (return_statements_t,
					     data.return_statements)->stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
	 objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
		    data.return_statements,
		    VEC_length (return_statements_t,
				data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Remove the line number from the representative return statement.
	 It now fills in for many such returns.  Failure to remove this
	 will result in incorrect results for coverage analysis.  */
      gimple_set_location (t.stmt, UNKNOWN_LOCATION);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = implicit_built_in_decls[BUILT_IN_SETJMP_DISPATCHER];
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Lowering must not have introduced or lost the outermost block.  */
  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}
/* Pass descriptor for the High-to-Low GIMPLE lowering pass.  */
struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  STMT is the GIMPLE_CALL being checked.  */

bool
gimple_check_call_args (gimple stmt)
{
  tree fndecl, parms, p;
  unsigned int i, nargs;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  Prefer the callee's
     DECL if known; otherwise fall back on the call's function type.  */
  fndecl = gimple_call_fndecl (stmt);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (gimple_call_fn (stmt))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = TREE_CHAIN (p))
	{
	  /* We cannot distinguish a varargs function from the case
	     of excess parameters, still deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  if (p == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || !fold_convertible_p (DECL_ARG_TYPE (p),
				      gimple_call_arg (stmt, i)))
            return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  /* If this is a varargs function defer inlining decision
	     to callee.  */
	  if (!p)
	    break;
	  if (TREE_VALUE (p) == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || !fold_convertible_p (TREE_VALUE (p),
				      gimple_call_arg (stmt, i)))
            return false;
	}
    }
  else
    {
      /* No parameter information at all: only a call with no
	 arguments can be verified.  */
      if (nargs != 0)
        return false;
    }
  return true;
}
288 /* Lower sequence SEQ. Unlike gimplification the statements are not relowered
289 when they are changed -- if this has to be done, the lowering routine must
290 do it explicitly. DATA is passed through the recursion. */
292 static void
293 lower_sequence (gimple_seq seq, struct lower_data *data)
295 gimple_stmt_iterator gsi;
297 for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
298 lower_stmt (&gsi, data);
302 /* Lower the OpenMP directive statement pointed by GSI. DATA is
303 passed through the recursion. */
305 static void
306 lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
308 gimple stmt;
310 stmt = gsi_stmt (*gsi);
312 lower_sequence (gimple_omp_body (stmt), data);
313 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
314 gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
315 gimple_omp_set_body (stmt, NULL);
316 gsi_remove (gsi, false);
/* Lower statement GSI.  DATA is passed through the recursion.
   Cases that dispatch to a helper return directly because the helper
   is responsible for advancing (or removing at) the iterator; all
   other cases fall out of the switch and step past the statement.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Attach the current lexical block before lowering.  */
  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      return;

    case GIMPLE_COND:
      /* The gimplifier has already lowered this into gotos.  */
      break;

    case GIMPLE_RETURN:
      lower_gimple_return (gsi, data);
      return;

    case GIMPLE_TRY:
      lower_sequence (gimple_try_eval (stmt), data);
      lower_sequence (gimple_try_cleanup (stmt), data);
      break;

    case GIMPLE_CATCH:
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_GOTO:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_SWITCH:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* Nothing to lower; just attach the block (done above).  */
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);

	/* Calls to __builtin_setjmp get expanded into the
	   setup/receiver pair; the shared dispatcher is emitted later
	   by lower_function_body.  */
	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    data->calls_builtin_setjmp = true;
	    lower_builtin_setjmp (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      lower_omp_directive (gsi, data);
      return;

    default:
      gcc_unreachable ();
    }

  gsi_next (gsi);
}
/* Lower a GIMPLE_BIND at GSI.  DATA is passed through the recursion.
   Links the bind's block into the block tree, lowers the body, then
   splices the body inline and removes the now-useless GIMPLE_BIND.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;
	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      /* Sub-blocks were chained in reverse above; restore source order.  */
      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  /* Otherwise the answer depends on the handler (operand 1).  */
  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX_EXPR to resume execution after the exception.  So
	 in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Same as try_catch_may_fallthru, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  /* Otherwise the answer depends on the cleanup sequence.  */
  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* An empty block (no last statement) may trivially fall through,
     handled by the default case via ERROR_MARK.  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
    case RESX_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence can not fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      /* A 'x = call ()' is handled like the call itself.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}
628 /* Try to determine if we can continue executing the statement
629 immediately following STMT. This guess need not be 100% accurate;
630 simply be conservative and return true if we don't know. This is
631 used only to avoid stupidly generating extra code. If we're wrong,
632 we'll just delete the extra code later. */
634 bool
635 gimple_stmt_may_fallthru (gimple stmt)
637 if (!stmt)
638 return true;
640 switch (gimple_code (stmt))
642 case GIMPLE_GOTO:
643 case GIMPLE_RETURN:
644 case GIMPLE_RESX:
645 /* Easy cases. If the last statement of the seq implies
646 control transfer, then we can't fall through. */
647 return false;
649 case GIMPLE_SWITCH:
650 /* Switch has already been lowered and represents a
651 branch to a selected label and hence can not fall through. */
652 return true;
654 case GIMPLE_COND:
655 /* GIMPLE_COND's are already lowered into a two-way branch. They
656 can't fall through. */
657 return false;
659 case GIMPLE_BIND:
660 return gimple_seq_may_fallthru (gimple_bind_body (stmt));
662 case GIMPLE_TRY:
663 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
664 return gimple_try_catch_may_fallthru (stmt);
666 /* It must be a GIMPLE_TRY_FINALLY. */
668 /* The finally clause is always executed after the try clause,
669 so if it does not fall through, then the try-finally will not
670 fall through. Otherwise, if the try clause does not fall
671 through, then when the finally clause falls through it will
672 resume execution wherever the try clause was going. So the
673 whole try-finally will only fall through if both the try
674 clause and the finally clause fall through. */
675 return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
676 && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));
678 case GIMPLE_ASSIGN:
679 return true;
681 case GIMPLE_CALL:
682 /* Functions that do not return do not fall through. */
683 return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;
685 default:
686 return true;
691 /* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ. */
693 bool
694 gimple_seq_may_fallthru (gimple_seq seq)
696 return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.
   Replaces the return with a goto to a shared per-return-value label;
   the labeled returns themselves are emitted at the end of the
   function by lower_function_body.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.
     Scan from the end since recently-added entries are likelier matches.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	goto found;
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp call at GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  /* DEST is the setjmp result lvalue, if the caller uses it.  */
  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_SETUP];
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_zero_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_RECEIVER];
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Record the variables in VARS into function FN.  VARS is a
   TREE_CHAIN of declarations; only automatic VAR_DECLs are recorded
   into FN's local_decls list.  */

void
record_vars_into (tree vars, tree fn)
{
  /* Temporarily switch cfun when recording into another function.  */
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = TREE_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs contains also function/type/constant declarations
         we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      cfun->local_decls = tree_cons (NULL_TREE, var,
				     cfun->local_decls);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}
/* Record the variables in VARS into current_function_decl.
   Convenience wrapper around record_vars_into.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}