/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "flags.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "gimple-low.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (see tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return statement, and duplicates are replaced with gotos to the
      unique return site.  */
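
/* As an illustration of point 3 (a sketch only; the label name below
   is invented, not taken from an actual dump), a body with two
   identical returns such as

       if (cond) return 0;
       ...
       return 0;

   is lowered into

       if (cond) goto L0;
       ...
       goto L0;
       L0:
       return 0;

   where L0 is the label recorded in the return_statements vector
   defined below.  */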
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);

/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return
	     statement.  It now fills in for the fallthru too.  Failure
	     to remove this will result in incorrect results for
	     coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
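
/* Note: this factory is invoked by the pass manager; pass_lower_cf is
   wired into the pipeline via a NEXT_PASS entry in passes.def.  */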

/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;
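
  /* No increment expression in the loop below: lower_stmt is
     responsible for advancing GSI (or removing the statement at it).  */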
  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}

/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
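
/* Invariant (inferred from the uses below, stated here for clarity):
   on entry data->cannot_fallthru describes the statement preceding GSI,
   and on return it must describe the statement(s) just lowered.  */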

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
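	  /* In short: cannot_fallthru (try-finally)
	     = cannot_fallthru (try) | cannot_fallthru (finally),
	     which is what the next three statements compute.  */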
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp)
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower the GIMPLE_BIND statement at GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}

/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}

/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
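  /* Note that an empty sequence may fall through: gimple_seq_last_stmt
     returns NULL for it, and gimple_stmt_may_fallthru (NULL) is true.  */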
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}

/* Lower the GIMPLE_RETURN at GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return
	     statement.  It now fills in for many such returns.  Failure
	     to remove this will result in incorrect results for
	     coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower the __builtin_setjmp call at GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into two other builtins, namely __builtin_setjmp_setup
   and __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */
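
/* For reference, each setup/receiver block in the example above
   replaces a single High GIMPLE call statement of the form (a sketch;
   the temporary and buffer names are invented)

       D.1844 = __builtin_setjmp (&buf);

   whose LHS, if any, feeds the conditional jump that follows it.  */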

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */
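
/* The guard on RES matters: posix_memalign stores a suitably aligned
   pointer only when it succeeds, i.e. returns 0, so the alignment may
   be assumed only on that path.  */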

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple stmt, call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				   align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}

/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs contain also function/type/constant declarations
	 we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}

/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}