/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-nested.h"
#include "calls.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "flags.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "gimple-low.h"
#include "tree-nested.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
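
/* As an illustrative sketch of point 3 (hypothetical labels and
   temporaries, not a verbatim dump), a function such as

       int f (int a) { if (a) return a; return a; }

   is lowered so that every "return a" becomes a goto to one shared
   label, with a single representative return emitted at the end:

       if (a != 0) goto <D.1>; else goto <D.2>;
       <D.1>:
       goto <retlab>;
       <D.2>:
       goto <retlab>;
       <retlab>:
       return a;

   Redundant gotos left behind by this scheme are cleaned up later
   during CFG construction.  */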
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp)
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
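
/* As a concrete reading of the GIMPLE_TRY_FINALLY logic above, here is
   an illustrative truth table (not part of the original sources):

       try falls thru   finally falls thru   whole construct falls thru
       no               no                   no
       no               yes                  no
       yes              no                   no
       yes              yes                  yes

   i.e. the construct may fall through only if both clauses may, which
   is exactly what saving the try clause's cannot_fallthru and OR-ing
   it into the finally clause's result computes.  */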
/* Lower a bind_expr TSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
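
/* For example (an illustrative sketch with hypothetical names),
   lowering

       { int a;  { int b;  b = 1; }  a = 2; }

   splices both bind bodies into one flat statement sequence

       b = 1;
       a = 2;

   while record_vars turns a and b into function-local declarations and
   the BLOCK_CHAIN/BLOCK_SUBBLOCKS surgery above rebuilds the scope tree
   on the side, with the inner block as a subblock of the outer one.  */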
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
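
/* An illustrative sketch (hypothetical source, not a verbatim dump):

       try { x = foo (); }        // TRY block may fall through
       catch (E) { return -1; }   // handler cannot fall through

   Here the whole construct may fall through because the TRY block may,
   regardless of the handler.  If the TRY block instead ended in a
   return, the construct could only fall through via a handler that
   does, e.g. one whose body is just "x = 0;".  */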
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}
/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */
static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple stmt, call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node, NULL);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node, NULL);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node, NULL);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				   align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}
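
/* Since each gsi_insert_after above uses GSI_NEW_STMT, the statements
   land in order after the call; for the &ptr form the lowered sequence
   looks like this (illustrative sketch, hypothetical temporary and
   label names):

       res = posix_memalign (&tem, align, size);
       if (res == 0) goto <align_lab>; else goto <noalign_lab>;
       <align_lab>:
       ptr.N = tem;
       ptr.N = __builtin_assume_aligned (ptr.N, align);
       ptr = ptr.N;
       <noalign_lab>:
   */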
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs contains also function/type/constant declarations
         we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}