/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "gimple-low.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
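/* As a sketch of difference 1 (the identifiers are invented for
   illustration), a High GIMPLE body that still carries its lexical
   scopes, such as

	{
	  int i;
	  i = 0;
	  {
	    int j;
	    j = i;
	  }
	}

   is flattened into the plain statement sequence

	i = 0;
	j = i;

   with the scope structure preserved on the side in the BLOCK tree
   and the local variables recorded in the function (see
   lower_gimple_bind and record_vars below).  */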
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */
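/* For instance (a sketch that elides clauses and the region's
   terminating statement), a parallel directive that still contains
   its body,

	GIMPLE_OMP_PARALLEL
	  <body>

   leaves this function as the now bodyless directive statement
   followed by its lowered body in the enclosing sequence:

	GIMPLE_OMP_PARALLEL
	<body>  */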
static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, so simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
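/* For example (a sketch), once a goto has been lowered we know the
   statement after it cannot be reached, so an unreachable return such
   as the second statement in

	goto <lab>;
	return;

   is simply removed instead of being lowered (see the GIMPLE_RETURN
   case below).  */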
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
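	  /* Equivalently: may_fallthru (try-finally)
	     == may_fallthru (try) && may_fallthru (finally); the code
	     below computes the complement of that conjunction in
	     cannot_fallthru terms.  */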
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OACC_PARALLEL:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */
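/* In summary: the construct may fall through if the try body may fall
   through, or, depending on the form of the cleanup sequence, if some
   catch handler or the EH_FILTER_FAILURE sequence may fall through; a
   bare cleanup sequence, being implicitly followed by a GIMPLE_RESX,
   never falls through.  */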
static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */
bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */
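/* For example (a sketch; the label name is invented), a body with two
   returns of the same value

	...
	return y;
	...
	return y;

   has each return replaced by

	goto <retlab>;

   while lower_function_body later emits the unique representative

	<retlab>:
	return y;

   at the end of the function.  */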
static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  bool change_cfun = fn != current_function_decl;

  if (change_cfun)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
    }

  if (change_cfun)
    pop_cfun ();
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}