/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "tree-nested.h"
37 #include "calls.h"
38 #include "predict.h"
39 #include "hard-reg-set.h"
40 #include "input.h"
41 #include "function.h"
42 #include "basic-block.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "gimple-expr.h"
46 #include "is-a.h"
47 #include "gimple.h"
48 #include "gimple-iterator.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "flags.h"
52 #include "diagnostic-core.h"
53 #include "tree-pass.h"
54 #include "langhooks.h"
55 #include "gimple-low.h"
56 #include "tree-nested.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
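
/* As a hand-written illustration (the labels and temporaries below are
   invented, not actual gimplifier output), a function like

     int f (int a) { { int b = a; if (b) return 1; } return 1; }

   enters this pass with a GIMPLE_BIND for the inner scope and two
   separate returns, and leaves it with the bind flattened and the
   identical returns merged into gotos to one labelled return site:

     b = a;
     if (b != 0) goto <lab1>; else goto <lab2>;
     <lab1>: goto <retlab>;
     <lab2>: goto <retlab>;
     <retlab>: return 1;  */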
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return
	     statement.  It now fills in for the fallthru too.  Failure to
	     remove this will result in incorrect results for coverage
	     analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
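
/* Note: the pass manager instantiates this pass through make_pass_lower_cf;
   it is expected to run once per function right after gimplification (see
   its position in passes.def).  */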
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;
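
  /* Note there is no gsi_next in the loop below: lower_stmt advances the
     iterator itself, since it may remove, replace or expand the statement
     at hand.  */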
  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
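
/* For example (sketch): after gimplification, a source fragment like

     if (x) return 1; else return 2;
     return 3;

   ends with "return 2;" immediately followed by "return 3;".  The first
   return sets cannot_fallthru, so the unreachable "return 3;" is simply
   deleted here rather than lowered.  */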
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
/* Lower a GIMPLE_BIND GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;
	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}
/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */
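
/* Sketch of the effect (labels invented): each "return a;" whose value
   matches an already-recorded return becomes "goto <retlab>;", and the
   single representative "<retlab>: return a;" is emitted at the end of
   the function by lower_function_body.  */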
static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return
	     statement.  It now fills in for many such returns.  Failure to
	     remove this will result in incorrect results for coverage
	     analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into two other builtins, namely __builtin_setjmp_setup
   and __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */
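
/* For orientation, a hedged source-level sketch of the kind of call this
   lowers (variable names invented):

     void *buf[5];
     ...
     if (__builtin_setjmp (buf) == 0)
       ... code on the direct path ...
     else
       ... code re-entered via __builtin_longjmp (buf, 1) ...

   Each such call yields one __builtin_setjmp_setup plus
   __builtin_setjmp_receiver pair as in the dump above.  */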
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */
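
/* A caller-side sketch (illustrative only) of what this enables:

     void *p;
     if (posix_memalign (&p, 32, n) == 0)
       ... use p ...

   After lowering, the inserted __builtin_assume_aligned call lets CCP
   derive that p is 32-byte aligned on the success path.  */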
static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple stmt, call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				   align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}
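
/* The statement order emitted above is roughly (sketch, labels invented;
   shown for the "&ptr" case, where the call argument is rewritten to &tem):

     res = posix_memalign (&tem, align, size);
     if (res == 0) goto <alignlab>; else goto <noalignlab>;
     <alignlab>:
     ptr_tmp = tem;
     ptr_tmp = __builtin_assume_aligned (ptr_tmp, align);
     ptr = ptr_tmp;
     <noalignlab>:  */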
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}