/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-nested.h"
#include "calls.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "flags.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "gimple-low.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */

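/* As a concrete illustration (an invented sketch, not a dump from a
   real compilation; all declaration and label names are made up), a
   function such as

       int f (int x) { { int y = x; if (y) return 1; } return 0; }

   reaches this pass as a single outer GIMPLE_BIND containing a nested
   GIMPLE_BIND for the inner scope.  After lowering, the binds are gone
   (the lexical scopes survive only as the BLOCK tree hanging off
   DECL_INITIAL), and because the gimplifier funnels both return values
   through one temporary, the two returns are merged into one return
   site:

       y = x;
       if (y != 0) goto <D.1>; else goto <D.2>;
       <D.1>:
       D.3 = 1;
       goto <retlab>;
       <D.2>:
       D.3 = 0;
       goto <retlab>;
       <retlab>:
       return D.3;  */
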
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);

/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

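/* Editorial note: in the GCC tree, pass_lower_cf is instantiated via
   this factory and listed in passes.def among the lowering passes, so
   it runs exactly once per function, before the CFG is built.  */
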
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}

/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

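/* For instance (an invented sketch): in

       goto <D.1>;
       return;
       <D.1>:
       ...

   the goto sets cannot_fallthru, so the unreachable return that
   follows is simply deleted when it is visited; the label then resets
   cannot_fallthru to false.  Dropping such dead returns early keeps
   the EH lowering pass from adding useless edges for them.  */
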
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp)
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;
	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

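/* Note on the blocks_nreverse calls here and in lower_function_body:
   each new scope is linked onto the front of its parent's
   BLOCK_SUBBLOCKS chain as it is encountered, so sibling scopes A, B,
   C end up chained as C -> B -> A during lowering; reversing the chain
   once at the end restores the source order A -> B -> C.  */
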
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}

/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}

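/* A few concrete cases of the try-finally rule above (illustrative
   only):

     try { return; } finally { x = 1; }   -> cannot fall through: the
					     finally resumes at the
					     pending return.
     try { x = 1; } finally { return; }   -> cannot fall through: the
					     finally never does.
     try { x = 1; } finally { x = 2; }    -> falls through: both parts
					     do.

   For GIMPLE_EH_ELSE the rule is the dual one used above: the
   statement may fall through if either the normal-path body or the
   EH-path body may.  */
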
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

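/* Sketch of the overall effect (invented names): every "return D.3;"
   in the body becomes "goto <retlab>;", and lower_function_body later
   emits the single representative

       <retlab>:
       return D.3;

   at the end of the function.  The representative's location is wiped
   (UNKNOWN_LOCATION) above as soon as it stands in for more than one
   return, so coverage does not attribute all of them to one line.  */
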
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple stmt, call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node, NULL);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      /* The first argument is &ptr: pass an addressable temporary to
	 the call instead, so the user's pointer can stay a register.  */
      tree tem = create_tmp_var (ptr_type_node, NULL);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    /* Otherwise load the allocated pointer through the first argument.  */
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node, NULL);
      gimple_call_set_lhs (call, res);
    }
  /* Emit the alignment hint only on the success path (res == 0).  */
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				   align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  /* Build 'ptr = __builtin_assume_aligned (ptr, align)' and store the
     result back through the original pointer.  */
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}

/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}

/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}