/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "hard-reg-set.h"
28 #include "alias.h"
29 #include "fold-const.h"
30 #include "tree-nested.h"
31 #include "calls.h"
32 #include "internal-fn.h"
33 #include "gimple-iterator.h"
34 #include "tree-iterator.h"
35 #include "tree-inline.h"
36 #include "flags.h"
37 #include "diagnostic-core.h"
38 #include "tree-pass.h"
39 #include "langhooks.h"
40 #include "gimple-low.h"
41 #include "tree-nested.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
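
/* As an illustration of point 3, the following is only a sketch (the D.1
   temporary, f, g and the <retlab> label name are invented here, not taken
   from a real dump).  A body that gimplified to

       D.1 = f ();
       return D.1;
       ...
       D.1 = g ();
       return D.1;

   is lowered so that each return becomes a jump to a shared label, and a
   single representative return is emitted at the end of the function:

       D.1 = f ();
       goto <retlab>;
       ...
       D.1 = g ();
       goto <retlab>;
       <retlab>:
       return D.1;  */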
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
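
/* For reference (a sketch, not part of this file): the pass manager creates
   this pass through make_pass_lower_cf above, and in the GCC sources it is
   scheduled early in the lowering pipeline via an entry of the form

     NEXT_PASS (pass_lower_cf);

   in passes.def; see that file for the authoritative position among the
   other lowering passes.  */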
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
/* Lower a GIMPLE_BIND GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup
   and __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}