/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "gimple-iterator.h"
#include "gimple-low.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
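
/* As an illustrative sketch only (labels and dump syntax below are
   invented, not taken from an actual dump), a function with two
   identical returns, such as

     int f (int a)
     {
       if (a)
         return 1;
       return 1;
     }

   enters this pass as High GIMPLE wrapped in a GIMPLE_BIND and leaves
   as Low GIMPLE with the bind gone and the duplicate return replaced
   by gotos to a single return site:

     f (int a)
     {
       if (a != 0) goto <L0>; else goto <L1>;
       <L0>:
       goto <retlab>;
       <L1>:
       goto <retlab>;
       <retlab>:
       return 1;
     }  */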
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();

  return 0;
}
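
/* A worked instance of the fallthru handling above (function names
   invented for illustration): a void function whose body can reach
   the end, e.g.

     void g (int a)
     {
       if (a)
         do_something ();
     }

   has a lowered body that may fall through and no recorded null
   return, so the code above synthesizes the trailing "return;" at
   function_end_locus.  Had the body ended in an explicit "return;",
   lower_gimple_return would have recorded it and only the
   representative label/return pair would be emitted here.  */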
namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
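
/* To make the try/finally fallthru rule above concrete (a sketch with
   invented bodies): in

     try { return; } finally { cleanup (); }

   the try clause cannot fall through, so even though the finally
   clause does, execution resumes at the return's destination and the
   whole statement cannot fall through.  By contrast,

     try { x = 1; } finally { cleanup (); }

   falls through, since both clauses do.  */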
/* Lower a bind_expr TSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      return true;
    }
}
/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */
static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs contains also function/type/constant declarations
	 we don't need to care about.  */
      if (!VAR_P (var))
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}
/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}